Dogs vs Cats

In [1]:
import zipfile, os, shutil, re
from os.path import isfile, isdir
import random
from tqdm import tqdm
import cv2
import numpy as np
from keras.preprocessing import image
from keras.applications.inception_v3 import InceptionV3
from keras.applications.inception_v3 import preprocess_input as inception_input
from keras.applications.resnet50 import ResNet50,decode_predictions
from keras.applications.resnet50 import preprocess_input as resnet_input
from keras.applications.xception import Xception
# from keras.preprocessing.image import ImageDataGenerator
from keras import optimizers
from keras.models import Model
# from keras.models import Sequential
from keras.layers import Convolution2D, MaxPooling2D, Activation, Dropout, Flatten, Dense,GlobalAveragePooling2D,Input
from keras.callbacks import EarlyStopping, ModelCheckpoint, CSVLogger

import matplotlib.pyplot as plt
import matplotlib.image as mpimg
%matplotlib inline

# Unzip train.zip into folder 'train' the first time the notebook runs.
if not isdir('train'):
    with zipfile.ZipFile('train.zip') as zip_file:
        zip_file.extractall()

# Create the per-class sub-folders the flow_from_directory-style layout needs.
for class_dir in ('train/cat', 'train/dog'):
    if not isdir(class_dir):
        os.mkdir(class_dir)
    
# Move every cat image into train/cat and every dog image into train/dog.
for filename in os.listdir('train'):
    # Dots are escaped and the digit run must be non-empty, so only names
    # like 'cat.123.jpg' match.  The original pattern 'cat.[0-9]*.jpg'
    # treated '.' as "any character" and allowed zero digits, so it could
    # match unintended names.  Both branches now move to an explicit
    # destination filename (the cat branch previously passed only the
    # directory, which relied on shutil filling in the basename).
    if re.search(r'cat\.[0-9]+\.jpg', filename):
        shutil.move('train/' + filename, 'train/cat/' + filename)
    elif re.search(r'dog\.[0-9]+\.jpg', filename):
        shutil.move('train/' + filename, 'train/dog/' + filename)
        
# Unzip test.zip into folder 'test' the first time the notebook runs.
if not isdir('test'):
    with zipfile.ZipFile('test.zip') as zip_file:
        zip_file.extractall()

# Keras generators expect a class sub-folder, so nest all images in test/test.
if not isdir('test/test'):
    os.mkdir('test/test')
    for filename in os.listdir('test'):
        # BUG FIX: the directory check must be relative to 'test/', not the
        # current working directory.  The original `os.path.isdir(filename)`
        # only skipped the 'test' sub-folder because a sibling 'test'
        # directory happened to exist in the CWD.
        if not os.path.isdir('test/' + filename):
            shutil.move('test/' + filename, 'test/test/')
C:\Users\thomas_gui\AppData\Local\conda\conda\envs\py36\lib\site-packages\h5py\__init__.py:34: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.
  from ._conv import register_converters as _register_converters
Using TensorFlow backend.
In [40]:
# Display the first 10 training images of each class; the cat and dog cells
# were copy-pasted duplicates, so the shared logic is factored into a helper.
def _show_samples(class_name, n=10):
    """Show the first `n` images of `class_name` ('cat' or 'dog') from
    train/<class_name>/ in a 2x5 grid, printing each image's dimensions.

    Returns the matplotlib Figure so the caller can keep a reference.
    """
    fig = plt.figure(figsize=(20, 10))
    for i in range(n):
        fname = '%s.%i.jpg' % (class_name, i)
        img = mpimg.imread('train/%s/%s' % (class_name, fname))
        print(fname + ':', 'dimension', img.shape)
        plt.subplot(2, 5, i + 1)
        plt.title(fname)
        plt.imshow(img)
    plt.show()
    return fig

fig1 = _show_samples('cat')
fig2 = _show_samples('dog')
cat.0.jpg: dimension (374, 500, 3)
cat.1.jpg: dimension (280, 300, 3)
cat.2.jpg: dimension (396, 312, 3)
cat.3.jpg: dimension (414, 500, 3)
cat.4.jpg: dimension (375, 499, 3)
cat.5.jpg: dimension (144, 175, 3)
cat.6.jpg: dimension (303, 400, 3)
cat.7.jpg: dimension (499, 495, 3)
cat.8.jpg: dimension (345, 461, 3)
cat.9.jpg: dimension (425, 320, 3)
dog.0.jpg: dimension (375, 499, 3)
dog.1.jpg: dimension (499, 327, 3)
dog.2.jpg: dimension (199, 187, 3)
dog.3.jpg: dimension (375, 499, 3)
dog.4.jpg: dimension (287, 300, 3)
dog.5.jpg: dimension (376, 499, 3)
dog.6.jpg: dimension (488, 499, 3)
dog.7.jpg: dimension (264, 299, 3)
dog.8.jpg: dimension (500, 469, 3)
dog.9.jpg: dimension (500, 368, 3)
In [41]:
# Show the first 10 test pictures (1.jpg .. 10.jpg) in a 2x5 grid.
fig3 = plt.figure(figsize=(20, 10))
for n in range(1, 11):          # test files are 1-indexed
    img = mpimg.imread('test/test/%i.jpg' % n)
    print('%i.jpg:' % n, 'dimension', img.shape)
    plt.subplot(2, 5, n)
    plt.title('%i.jpg' % n)
    plt.imshow(img)
plt.show()
1.jpg: dimension (499, 381, 3)
2.jpg: dimension (299, 296, 3)
3.jpg: dimension (299, 300, 3)
4.jpg: dimension (288, 374, 3)
5.jpg: dimension (400, 399, 3)
6.jpg: dimension (375, 499, 3)
7.jpg: dimension (148, 192, 3)
8.jpg: dimension (234, 325, 3)
9.jpg: dimension (380, 480, 3)
10.jpg: dimension (374, 500, 3)
In [35]:
# ImageNet (WordNet) synset ids for every dog breed and every cat class.
# Used by the outlier-detection cells below: an image is considered a
# plausible cat/dog only if one of these ids appears in a classifier's
# top-30 predictions.
dogs = [
 'n02085620','n02085782','n02085936','n02086079'
,'n02086240','n02086646','n02086910','n02087046'
,'n02087394','n02088094','n02088238','n02088364'
,'n02088466','n02088632','n02089078','n02089867'
,'n02089973','n02090379','n02090622','n02090721'
,'n02091032','n02091134','n02091244','n02091467'
,'n02091635','n02091831','n02092002','n02092339'
,'n02093256','n02093428','n02093647','n02093754'
,'n02093859','n02093991','n02094114','n02094258'
,'n02094433','n02095314','n02095570','n02095889'
,'n02096051','n02096177','n02096294','n02096437'
,'n02096585','n02097047','n02097130','n02097209'
,'n02097298','n02097474','n02097658','n02098105'
,'n02098286','n02098413','n02099267','n02099429'
,'n02099601','n02099712','n02099849','n02100236'
,'n02100583','n02100735','n02100877','n02101006'
,'n02101388','n02101556','n02102040','n02102177'
,'n02102318','n02102480','n02102973','n02104029'
,'n02104365','n02105056','n02105162','n02105251'
,'n02105412','n02105505','n02105641','n02105855'
,'n02106030','n02106166','n02106382','n02106550'
,'n02106662','n02107142','n02107312','n02107574'
,'n02107683','n02107908','n02108000','n02108089'
,'n02108422','n02108551','n02108915','n02109047'
,'n02109525','n02109961','n02110063','n02110185'
,'n02110341','n02110627','n02110806','n02110958'
,'n02111129','n02111277','n02111500','n02111889'
,'n02112018','n02112137','n02112350','n02112706'
,'n02113023','n02113186','n02113624','n02113712'
,'n02113799','n02113978']

# The 7 ImageNet cat synsets (domestic + wild cats).
cats=[
'n02123045','n02123159','n02123394','n02123597'
,'n02124075','n02125311','n02127052']
In [4]:
# Batch generator for ResNet50.predict_generator.
def test_gen_224(X, batch_size):
    """Endlessly yield batches of `batch_size` images for ResNet50.

    Each image path in `X` is read with OpenCV (BGR order), resized to
    224x224 and passed through the ResNet50 preprocessing function.

    NOTE: the final len(X) % batch_size images are never yielded, so
    callers must choose batch_size to divide len(X) evenly (as the
    12500 / 100 usage below does).
    """
    datalen = len(X)
    counter = datalen // batch_size
    while True:
        for i in range(counter):
            # np.float (alias of Python float) was removed in NumPy 1.24;
            # use an explicit float32, which Keras consumes anyway.
            X_224 = np.zeros((batch_size, 224, 224, 3), dtype=np.float32)
            for j in range(batch_size):
                X_224[j] = cv2.resize(cv2.imread(X[i * batch_size + j]), (224, 224))
            X_224 = resnet_input(X_224)
            yield X_224
In [5]:
# First filter pass: run ResNet50 (ImageNet) over every training image and
# flag images whose top-30 predictions contain no cat / no dog synset.
X_catfile = ['train/cat/cat.%d.jpg' % i for i in range(12500)]
X_dogfile = ['train/dog/dog.%d.jpg' % i for i in range(12500)]

model = ResNet50(weights='imagenet')
cat_preds = model.predict_generator(test_gen_224(X_catfile, 100), steps=125)
dog_preds = model.predict_generator(test_gen_224(X_dogfile, 100), steps=125)

cat_outlier = []
dog_outlier = []
for i in tqdm(range(12500)):
    cat_prediction = decode_predictions(cat_preds[i:(i + 1), :], top=30)[0]
    dog_prediction = decode_predictions(dog_preds[i:(i + 1), :], top=30)[0]
    # an image survives only if at least one top-30 label is in its class list
    if not any(label[0] in cats for label in cat_prediction):
        cat_outlier.append(X_catfile[i])     # no cat synset -> cat outlier
    if not any(label[0] in dogs for label in dog_prediction):
        dog_outlier.append(X_dogfile[i])     # no dog synset -> dog outlier

# print('Number of outliers for cat: ', len(cat_outlier))
# print('Number of outliers for dog: ', len(dog_outlier))
100%|██████████████████████████████████████████████████████████████████████████| 12500/12500 [00:01<00:00, 6876.19it/s]
In [6]:
# Second filter pass: re-check the ResNet50 outliers with InceptionV3.
# Only images that ALSO fail this check stay flagged.  The original cell
# duplicated the cat and dog loops; the shared logic is now one helper.

def find_outliers(paths, class_ids, model, preprocess, target_size=(299, 299)):
    """Return the subset of `paths` whose top-30 ImageNet predictions
    contain none of the synset ids in `class_ids`.

    paths       -- iterable of image file paths
    class_ids   -- collection of WordNet synset id strings (e.g. `cats`)
    model       -- a Keras ImageNet classifier with .predict
    preprocess  -- the preprocessing function matching `model`
    target_size -- input size expected by `model` (default 299x299)
    """
    outliers = []
    for path in tqdm(paths):
        img = image.load_img(path, target_size=target_size)
        x = np.expand_dims(image.img_to_array(img), axis=0)
        preds = model.predict(preprocess(x))
        top30 = decode_predictions(preds, top=30)[0]
        # keep only if at least one top-30 label belongs to the class
        if not any(label[0] in class_ids for label in top30):
            outliers.append(path)
    return outliers

model = InceptionV3(weights='imagenet')
cat_outlier1 = find_outliers(cat_outlier, cats, model, inception_input)
dog_outlier1 = find_outliers(dog_outlier, dogs, model, inception_input)

# print('Number of outliers for cat: ', len(cat_outlier1))
# print('Number of outliers for dog: ', len(dog_outlier1))
330
56
100%|████████████████████████████████████████████████████████████████████████████████| 330/330 [00:10<00:00, 30.35it/s]
100%|██████████████████████████████████████████████████████████████████████████████████| 56/56 [00:01<00:00, 32.29it/s]
84
21
In [7]:
# Third filter pass: re-check the surviving outliers with Xception.
# The results overwrite cat_outlier / dog_outlier with the final lists.

cat_outlier = []
dog_outlier = []
model = Xception(weights='imagenet')

for path in tqdm(cat_outlier1):
    img = image.load_img(path, target_size=(299, 299))
    x = np.expand_dims(image.img_to_array(img), axis=0)
    # NOTE(review): the InceptionV3 preprocess function is applied before
    # Xception here — confirm this matches Xception's expected preprocessing.
    x = inception_input(x)
    cat_preds = model.predict(x)
    cat_prediction = decode_predictions(cat_preds, top=30)[0]
    if not any(label[0] in cats for label in cat_prediction):
        cat_outlier.append(path)

for path in tqdm(dog_outlier1):
    img = image.load_img(path, target_size=(299, 299))
    x = np.expand_dims(image.img_to_array(img), axis=0)
    x = inception_input(x)
    dog_preds = model.predict(x)
    dog_prediction = decode_predictions(dog_preds, top=30)[0]
    if not any(label[0] in dogs for label in dog_prediction):
        dog_outlier.append(path)

# print('Number of outliers for cat: ', len(cat_outlier))
# print('Number of outliers for dog: ', len(dog_outlier))
100%|██████████████████████████████████████████████████████████████████████████████████| 84/84 [00:02<00:00, 28.88it/s]
100%|██████████████████████████████████████████████████████████████████████████████████| 21/21 [00:00<00:00, 35.87it/s]
In [9]:
print('Number of outliers for cat: ',len(cat_outlier))

# lay the remaining cat outliers out on a 10x5 grid
fig1 = plt.figure(figsize=(20, 40))
for idx, path in enumerate(cat_outlier):
    img = mpimg.imread(path)
    plt.subplot(10, 5, idx + 1)
    plt.title(path)
    plt.imshow(img)
plt.show()
Number of outliers for cat:  46
In [10]:
print('Number of outliers for dog: ',len(dog_outlier))

# lay the remaining dog outliers out on a 4x5 grid
fig2 = plt.figure(figsize=(20, 20))
for idx, path in enumerate(dog_outlier):
    img = mpimg.imread(path)
    plt.subplot(4, 5, idx + 1)
    plt.title(path)
    plt.imshow(img)
plt.show()
Number of outliers for dog:  18
In [2]:
# Data-augmentation helpers: each function applies one random transform
# to an image; random_transfer (below) picks one of them at random.

def random_crop(img, ratio=1.0, p=1.0):
    """Randomly crop `img`; `ratio` is the cropped/original AREA ratio.

    The aspect ratio is jittered by up to +/-5%.  With probability
    (1 - p) the image is returned unchanged; p defaults to 1.0, which
    preserves the original always-crop behaviour (the original code's
    `random.random() < 1` gate was always true).
    """
    if random.random() < p:
        h, w = img.shape[:2]
        # random aspect-ratio perturbation
        hw_delta = np.random.uniform(-0.05, 0.05)
        hw_mult = 1 + hw_delta

        # crop window size, clamped so it never exceeds the image
        w_crop = min(int(round(w * np.sqrt(ratio * hw_mult))), w)
        h_crop = min(int(round(h * np.sqrt(ratio / hw_mult))), h)

        # random top-left corner of the crop window
        x0 = np.random.randint(0, w - w_crop + 1)
        y0 = np.random.randint(0, h - h_crop + 1)

        img = img[y0:y0 + h_crop, x0:x0 + w_crop]

    return img
    
def random_rotate(img, angle=0.0):
    """Rotate img by `angle` degrees about its centre, then centre-crop to
    the largest axis-aligned rectangle (same aspect ratio) that contains
    no black border introduced by the rotation.

    NOTE(review): the `random.random() < 1` gate is always true, so the
    rotation is always applied; it looks like a leftover probability gate.
    """
    if random.random() < 1:
        h,w = img.shape[:2]
        # angle = np.random.uniform(-angle,angle)
        # angle cycle is 360
        angle %= 360
        # affine matrix for rotation about the image centre (OpenCV built-in)
        M_rotate = cv2.getRotationMatrix2D((w/2,h/2),angle,1)
        # rotated image (same canvas size, corners filled with black)
        img = cv2.warpAffine(img, M_rotate, (w, h))
        # fold the angle into [0, 90] — the border geometry repeats every 180
        # degrees and is symmetric about 90
        angle_crop = angle % 180
        
        if angle_crop > 90:
            angle_crop = 180 - angle_crop
        # convert to rad
        theta = angle_crop * np.pi / 180.0
        # compute height/width ratio
        hw_ratio = float(h) / float(w)
        
        tan_theta = np.tan(theta)
        numerator = np.cos(theta) + np.sin(theta) * tan_theta
        
        # aspect-ratio term of the denominator (uses the longer side)
        r = hw_ratio if h > w else 1/hw_ratio
        # denominator of the crop-scale formula
        denominator = r * tan_theta +1
        # final side-length scale factor for the borderless crop
        crop_mult = numerator / denominator
        # centre crop of crop_mult * (w, h)
        w_crop = int(round(crop_mult*w))
        h_crop = int(round(crop_mult*h))
        x0 = int((w-w_crop)/2)
        y0 = int((h-h_crop)/2)
        img = img[y0:y0+h_crop,x0:x0+w_crop]
    return img

def random_shear(img, angle=0.0, p=1.0):
    """Shear `img` along the x-axis by `angle` degrees (y is unchanged).

    With probability (1 - p) the image is returned untouched; p defaults
    to 1.0, preserving the original always-shear behaviour (the original
    `random.random() < 1` gate was always true).  The output keeps the
    original (w, h) canvas, so sheared-out regions are filled with black.
    """
    if random.random() < p:
        h, w = img.shape[:2]
        # x-axis shear matrix: x' = x + tan(theta) * y
        theta = angle * np.pi / 180
        M_shear = np.array([[1, np.tan(theta), 0], [0, 1, 0]], dtype=np.float32)
        img = cv2.warpAffine(img, M_shear, (w, h))
    return img

def random_h_flip(img, horizontal_flip=False, p=1.0):
    """Mirror `img` left-right when `horizontal_flip` is truthy.

    With probability (1 - p) the flip is skipped; p defaults to 1.0,
    preserving the original always-flip behaviour (`random.random() < 1`
    was always true).  The `== True` comparison is replaced by a plain
    truthiness test.
    """
    if horizontal_flip and random.random() < p:
        img = cv2.flip(img, 1)
    return img
            
def random_v_flip(img, vertical_flip=False, p=1.0):
    """Mirror `img` top-bottom when `vertical_flip` is truthy.

    With probability (1 - p) the flip is skipped; p defaults to 1.0,
    preserving the original always-flip behaviour (`random.random() < 1`
    was always true).
    """
    if vertical_flip and random.random() < p:
        img = cv2.flip(img, 0)
    return img

def random_transfer(img, ratio=None, rotate=None, shear=None,
                    horizontal_flip=False, vertical_flip=False):
    """Apply ONE randomly chosen augmentation to `img`.

    Each non-None / truthy argument enables the corresponding transform
    (crop ratio, rotation angle, shear angle, horizontal flip, vertical
    flip); one enabled transform is then picked uniformly at random and
    applied.  If no transform is enabled the image is returned unchanged.
    """
    params = {}
    if ratio is not None:
        params['ratio'] = ratio
    if rotate is not None:
        params['rotate'] = rotate
    if shear is not None:
        params['shear'] = shear
    if horizontal_flip:
        params['horizontal_flip'] = horizontal_flip
    if vertical_flip:
        params['vertical_flip'] = vertical_flip

    # BUG FIX: the original called random.choice(func) outside the
    # `if params:` guard, so calling with no transforms enabled raised
    # NameError ('func' undefined).  Return the image unchanged instead.
    if not params:
        return img

    func = random.choice(list(params))
    # print(func, params[func])

    function_map = {'ratio': random_crop,
                    'rotate': random_rotate,
                    'shear': random_shear,
                    'horizontal_flip': random_h_flip,
                    'vertical_flip': random_v_flip}

    return function_map[func](img, params[func])
In [18]:
# Replace each outlier image with a random transform of a randomly chosen
# training image of the same class (keeps the file count at 12500 per class).
for outlier_path in cat_outlier:
    cat_file = random.choice(X_catfile)
    img_cat = random_transfer(cv2.imread(cat_file), 0.9, 10, 10, True, True)
    cv2.imwrite(outlier_path, img_cat)

for outlier_path in dog_outlier:
    dog_file = random.choice(X_dogfile)
    img_dog = random_transfer(cv2.imread(dog_file), 0.9, 10, 10, True, True)
    cv2.imwrite(outlier_path, img_dog)
In [4]:
X_catfile = ['train/cat/cat.%d.jpg' % i for i in range(12500)]
X_dogfile = ['train/dog/dog.%d.jpg' % i for i in range(12500)]

# Hold out the last 20% of each class for validation; splitting per class
# keeps cats and dogs balanced in both sets.
splitpoint = int(12500 * 0.8)

# 10,000 files of each class for training, 2,500 for validation
X_catfile_train, X_catfile_val = X_catfile[:splitpoint], X_catfile[splitpoint:]
X_dogfile_train, X_dogfile_val = X_dogfile[:splitpoint], X_dogfile[splitpoint:]

# Concatenate: cats first (label 0), then dogs (label 1).
X_train = np.array(X_catfile_train + X_dogfile_train)
y_train = np.array([0] * splitpoint + [1] * splitpoint)
X_val = np.array(X_catfile_val + X_dogfile_val)
y_val = np.array([0] * (12500 - splitpoint) + [1] * (12500 - splitpoint))

# Shuffle both sets via index permutations.
train_idx = np.arange(20000)
val_idx = np.arange(5000)

# Fixed seed before each shuffle so the split is reproducible run to run.
random.seed(2018)
random.shuffle(train_idx)
random.seed(2018)
random.shuffle(val_idx)

# Reorder data and labels together with the shuffled indices.
X_train, y_train = X_train[train_idx], y_train[train_idx]
X_val, y_val = X_val[val_idx], y_val[val_idx]
# half of the training samples are cats, so y_train.sum() == 10,000
In [5]:
# training hyper-parameters used by the generators / fit call below
batch_size = 50   # images per generator batch (must divide the set sizes)
epochs = 10       # full passes over the training set

# define train_generator function, yields batches of training data
def train_generator(X, y, batch_size):
    """Endlessly yield (batch_images, batch_labels) for Keras fit_generator.

    X -- sequence of image file paths; y -- labels aligned with X.
    Images are read with OpenCV (BGR order), resized to 299x299 and run
    through the InceptionV3 preprocessing function.

    NOTE: the final len(y) % batch_size samples are dropped each epoch,
    so batch_size should divide len(y) evenly.
    """
    datalen = len(y)
    counter = datalen // batch_size
    while True:
        for i in range(counter):
            # np.float was removed in NumPy 1.24 -> explicit float32
            X_299 = np.zeros((batch_size, 299, 299, 3), dtype=np.float32)
            for j in range(batch_size):
                X_299[j] = cv2.resize(cv2.imread(X[i * batch_size + j]), (299, 299))
            X_299 = inception_input(X_299)
            yield X_299, y[i * batch_size:(i + 1) * batch_size]

def val_generator(X, y, batch_size):
    """Validation counterpart of train_generator: endlessly yield
    (batch_images, batch_labels), images resized to 299x299 and
    InceptionV3-preprocessed.

    Kept as a separate generator so training and validation iterate
    with independent cursors.  The final len(y) % batch_size samples
    are dropped, so batch_size should divide len(y) evenly.
    """
    datalen = len(y)
    counter = datalen // batch_size
    while True:
        for i in range(counter):
            # np.float was removed in NumPy 1.24 -> explicit float32
            X_299 = np.zeros((batch_size, 299, 299, 3), dtype=np.float32)
            for j in range(batch_size):
                X_299[j] = cv2.resize(cv2.imread(X[i * batch_size + j]), (299, 299))
            X_299 = inception_input(X_299)
            yield X_299, y[i * batch_size:(i + 1) * batch_size]
In [10]:
# from IPython.display import SVG
# from keras.utils.vis_utils import model_to_dot
# from keras.utils import plot_model

# Transfer learning: InceptionV3 convolutional base with ImageNet weights
# (classifier head removed) plus a new binary-classification head.
model_InceptionV3 = InceptionV3(include_top = False, weights = 'imagenet', input_shape=(299,299,3))
# freeze every pre-trained layer so only the new head is trained
for layer in model_InceptionV3.layers:
    layer.trainable = False
# new head: global average pooling -> dropout -> single sigmoid unit
# (sigmoid output matches the 0=cat / 1=dog labels built above)
model = GlobalAveragePooling2D() (model_InceptionV3.output)

model = Dropout(0.5)(model)
model = Dense(1, activation='sigmoid')(model)
model_InceptionV3 = Model(model_InceptionV3.input,model, name = 'model_InceptionV3')

# plot_model(model_InceptionV3_GAP, to_file='model_InceptionV3.png',show_shapes = True) 
# SVG(model_to_dot(model_InceptionV3).create(prog='dot', format='svg'))

model_InceptionV3.summary()
__________________________________________________________________________________________________
Layer (type)                    Output Shape         Param #     Connected to                     
==================================================================================================
input_3 (InputLayer)            (None, 299, 299, 3)  0                                            
__________________________________________________________________________________________________
conv2d_189 (Conv2D)             (None, 149, 149, 32) 864         input_3[0][0]                    
__________________________________________________________________________________________________
batch_normalization_189 (BatchN (None, 149, 149, 32) 96          conv2d_189[0][0]                 
__________________________________________________________________________________________________
activation_189 (Activation)     (None, 149, 149, 32) 0           batch_normalization_189[0][0]    
__________________________________________________________________________________________________
conv2d_190 (Conv2D)             (None, 147, 147, 32) 9216        activation_189[0][0]             
__________________________________________________________________________________________________
batch_normalization_190 (BatchN (None, 147, 147, 32) 96          conv2d_190[0][0]                 
__________________________________________________________________________________________________
activation_190 (Activation)     (None, 147, 147, 32) 0           batch_normalization_190[0][0]    
__________________________________________________________________________________________________
conv2d_191 (Conv2D)             (None, 147, 147, 64) 18432       activation_190[0][0]             
__________________________________________________________________________________________________
batch_normalization_191 (BatchN (None, 147, 147, 64) 192         conv2d_191[0][0]                 
__________________________________________________________________________________________________
activation_191 (Activation)     (None, 147, 147, 64) 0           batch_normalization_191[0][0]    
__________________________________________________________________________________________________
max_pooling2d_9 (MaxPooling2D)  (None, 73, 73, 64)   0           activation_191[0][0]             
__________________________________________________________________________________________________
conv2d_192 (Conv2D)             (None, 73, 73, 80)   5120        max_pooling2d_9[0][0]            
__________________________________________________________________________________________________
batch_normalization_192 (BatchN (None, 73, 73, 80)   240         conv2d_192[0][0]                 
__________________________________________________________________________________________________
activation_192 (Activation)     (None, 73, 73, 80)   0           batch_normalization_192[0][0]    
__________________________________________________________________________________________________
conv2d_193 (Conv2D)             (None, 71, 71, 192)  138240      activation_192[0][0]             
__________________________________________________________________________________________________
batch_normalization_193 (BatchN (None, 71, 71, 192)  576         conv2d_193[0][0]                 
__________________________________________________________________________________________________
activation_193 (Activation)     (None, 71, 71, 192)  0           batch_normalization_193[0][0]    
__________________________________________________________________________________________________
max_pooling2d_10 (MaxPooling2D) (None, 35, 35, 192)  0           activation_193[0][0]             
__________________________________________________________________________________________________
conv2d_197 (Conv2D)             (None, 35, 35, 64)   12288       max_pooling2d_10[0][0]           
__________________________________________________________________________________________________
batch_normalization_197 (BatchN (None, 35, 35, 64)   192         conv2d_197[0][0]                 
__________________________________________________________________________________________________
activation_197 (Activation)     (None, 35, 35, 64)   0           batch_normalization_197[0][0]    
__________________________________________________________________________________________________
conv2d_195 (Conv2D)             (None, 35, 35, 48)   9216        max_pooling2d_10[0][0]           
__________________________________________________________________________________________________
conv2d_198 (Conv2D)             (None, 35, 35, 96)   55296       activation_197[0][0]             
__________________________________________________________________________________________________
batch_normalization_195 (BatchN (None, 35, 35, 48)   144         conv2d_195[0][0]                 
__________________________________________________________________________________________________
batch_normalization_198 (BatchN (None, 35, 35, 96)   288         conv2d_198[0][0]                 
__________________________________________________________________________________________________
activation_195 (Activation)     (None, 35, 35, 48)   0           batch_normalization_195[0][0]    
__________________________________________________________________________________________________
activation_198 (Activation)     (None, 35, 35, 96)   0           batch_normalization_198[0][0]    
__________________________________________________________________________________________________
average_pooling2d_19 (AveragePo (None, 35, 35, 192)  0           max_pooling2d_10[0][0]           
__________________________________________________________________________________________________
conv2d_194 (Conv2D)             (None, 35, 35, 64)   12288       max_pooling2d_10[0][0]           
__________________________________________________________________________________________________
conv2d_196 (Conv2D)             (None, 35, 35, 64)   76800       activation_195[0][0]             
__________________________________________________________________________________________________
conv2d_199 (Conv2D)             (None, 35, 35, 96)   82944       activation_198[0][0]             
__________________________________________________________________________________________________
conv2d_200 (Conv2D)             (None, 35, 35, 32)   6144        average_pooling2d_19[0][0]       
__________________________________________________________________________________________________
batch_normalization_194 (BatchN (None, 35, 35, 64)   192         conv2d_194[0][0]                 
__________________________________________________________________________________________________
batch_normalization_196 (BatchN (None, 35, 35, 64)   192         conv2d_196[0][0]                 
__________________________________________________________________________________________________
batch_normalization_199 (BatchN (None, 35, 35, 96)   288         conv2d_199[0][0]                 
__________________________________________________________________________________________________
batch_normalization_200 (BatchN (None, 35, 35, 32)   96          conv2d_200[0][0]                 
__________________________________________________________________________________________________
activation_194 (Activation)     (None, 35, 35, 64)   0           batch_normalization_194[0][0]    
__________________________________________________________________________________________________
activation_196 (Activation)     (None, 35, 35, 64)   0           batch_normalization_196[0][0]    
__________________________________________________________________________________________________
activation_199 (Activation)     (None, 35, 35, 96)   0           batch_normalization_199[0][0]    
__________________________________________________________________________________________________
activation_200 (Activation)     (None, 35, 35, 32)   0           batch_normalization_200[0][0]    
__________________________________________________________________________________________________
mixed0 (Concatenate)            (None, 35, 35, 256)  0           activation_194[0][0]             
                                                                 activation_196[0][0]             
                                                                 activation_199[0][0]             
                                                                 activation_200[0][0]             
__________________________________________________________________________________________________
conv2d_204 (Conv2D)             (None, 35, 35, 64)   16384       mixed0[0][0]                     
__________________________________________________________________________________________________
batch_normalization_204 (BatchN (None, 35, 35, 64)   192         conv2d_204[0][0]                 
__________________________________________________________________________________________________
activation_204 (Activation)     (None, 35, 35, 64)   0           batch_normalization_204[0][0]    
__________________________________________________________________________________________________
conv2d_202 (Conv2D)             (None, 35, 35, 48)   12288       mixed0[0][0]                     
__________________________________________________________________________________________________
conv2d_205 (Conv2D)             (None, 35, 35, 96)   55296       activation_204[0][0]             
__________________________________________________________________________________________________
batch_normalization_202 (BatchN (None, 35, 35, 48)   144         conv2d_202[0][0]                 
__________________________________________________________________________________________________
batch_normalization_205 (BatchN (None, 35, 35, 96)   288         conv2d_205[0][0]                 
__________________________________________________________________________________________________
activation_202 (Activation)     (None, 35, 35, 48)   0           batch_normalization_202[0][0]    
__________________________________________________________________________________________________
activation_205 (Activation)     (None, 35, 35, 96)   0           batch_normalization_205[0][0]    
__________________________________________________________________________________________________
average_pooling2d_20 (AveragePo (None, 35, 35, 256)  0           mixed0[0][0]                     
__________________________________________________________________________________________________
conv2d_201 (Conv2D)             (None, 35, 35, 64)   16384       mixed0[0][0]                     
__________________________________________________________________________________________________
conv2d_203 (Conv2D)             (None, 35, 35, 64)   76800       activation_202[0][0]             
__________________________________________________________________________________________________
conv2d_206 (Conv2D)             (None, 35, 35, 96)   82944       activation_205[0][0]             
__________________________________________________________________________________________________
conv2d_207 (Conv2D)             (None, 35, 35, 64)   16384       average_pooling2d_20[0][0]       
__________________________________________________________________________________________________
batch_normalization_201 (BatchN (None, 35, 35, 64)   192         conv2d_201[0][0]                 
__________________________________________________________________________________________________
batch_normalization_203 (BatchN (None, 35, 35, 64)   192         conv2d_203[0][0]                 
__________________________________________________________________________________________________
batch_normalization_206 (BatchN (None, 35, 35, 96)   288         conv2d_206[0][0]                 
__________________________________________________________________________________________________
batch_normalization_207 (BatchN (None, 35, 35, 64)   192         conv2d_207[0][0]                 
__________________________________________________________________________________________________
activation_201 (Activation)     (None, 35, 35, 64)   0           batch_normalization_201[0][0]    
__________________________________________________________________________________________________
activation_203 (Activation)     (None, 35, 35, 64)   0           batch_normalization_203[0][0]    
__________________________________________________________________________________________________
activation_206 (Activation)     (None, 35, 35, 96)   0           batch_normalization_206[0][0]    
__________________________________________________________________________________________________
activation_207 (Activation)     (None, 35, 35, 64)   0           batch_normalization_207[0][0]    
__________________________________________________________________________________________________
mixed1 (Concatenate)            (None, 35, 35, 288)  0           activation_201[0][0]             
                                                                 activation_203[0][0]             
                                                                 activation_206[0][0]             
                                                                 activation_207[0][0]             
__________________________________________________________________________________________________
conv2d_211 (Conv2D)             (None, 35, 35, 64)   18432       mixed1[0][0]                     
__________________________________________________________________________________________________
batch_normalization_211 (BatchN (None, 35, 35, 64)   192         conv2d_211[0][0]                 
__________________________________________________________________________________________________
activation_211 (Activation)     (None, 35, 35, 64)   0           batch_normalization_211[0][0]    
__________________________________________________________________________________________________
conv2d_209 (Conv2D)             (None, 35, 35, 48)   13824       mixed1[0][0]                     
__________________________________________________________________________________________________
conv2d_212 (Conv2D)             (None, 35, 35, 96)   55296       activation_211[0][0]             
__________________________________________________________________________________________________
batch_normalization_209 (BatchN (None, 35, 35, 48)   144         conv2d_209[0][0]                 
__________________________________________________________________________________________________
batch_normalization_212 (BatchN (None, 35, 35, 96)   288         conv2d_212[0][0]                 
__________________________________________________________________________________________________
activation_209 (Activation)     (None, 35, 35, 48)   0           batch_normalization_209[0][0]    
__________________________________________________________________________________________________
activation_212 (Activation)     (None, 35, 35, 96)   0           batch_normalization_212[0][0]    
__________________________________________________________________________________________________
average_pooling2d_21 (AveragePo (None, 35, 35, 288)  0           mixed1[0][0]                     
__________________________________________________________________________________________________
conv2d_208 (Conv2D)             (None, 35, 35, 64)   18432       mixed1[0][0]                     
__________________________________________________________________________________________________
conv2d_210 (Conv2D)             (None, 35, 35, 64)   76800       activation_209[0][0]             
__________________________________________________________________________________________________
conv2d_213 (Conv2D)             (None, 35, 35, 96)   82944       activation_212[0][0]             
__________________________________________________________________________________________________
conv2d_214 (Conv2D)             (None, 35, 35, 64)   18432       average_pooling2d_21[0][0]       
__________________________________________________________________________________________________
batch_normalization_208 (BatchN (None, 35, 35, 64)   192         conv2d_208[0][0]                 
__________________________________________________________________________________________________
batch_normalization_210 (BatchN (None, 35, 35, 64)   192         conv2d_210[0][0]                 
__________________________________________________________________________________________________
batch_normalization_213 (BatchN (None, 35, 35, 96)   288         conv2d_213[0][0]                 
__________________________________________________________________________________________________
batch_normalization_214 (BatchN (None, 35, 35, 64)   192         conv2d_214[0][0]                 
__________________________________________________________________________________________________
activation_208 (Activation)     (None, 35, 35, 64)   0           batch_normalization_208[0][0]    
__________________________________________________________________________________________________
activation_210 (Activation)     (None, 35, 35, 64)   0           batch_normalization_210[0][0]    
__________________________________________________________________________________________________
activation_213 (Activation)     (None, 35, 35, 96)   0           batch_normalization_213[0][0]    
__________________________________________________________________________________________________
activation_214 (Activation)     (None, 35, 35, 64)   0           batch_normalization_214[0][0]    
__________________________________________________________________________________________________
mixed2 (Concatenate)            (None, 35, 35, 288)  0           activation_208[0][0]             
                                                                 activation_210[0][0]             
                                                                 activation_213[0][0]             
                                                                 activation_214[0][0]             
__________________________________________________________________________________________________
conv2d_216 (Conv2D)             (None, 35, 35, 64)   18432       mixed2[0][0]                     
__________________________________________________________________________________________________
batch_normalization_216 (BatchN (None, 35, 35, 64)   192         conv2d_216[0][0]                 
__________________________________________________________________________________________________
activation_216 (Activation)     (None, 35, 35, 64)   0           batch_normalization_216[0][0]    
__________________________________________________________________________________________________
conv2d_217 (Conv2D)             (None, 35, 35, 96)   55296       activation_216[0][0]             
__________________________________________________________________________________________________
batch_normalization_217 (BatchN (None, 35, 35, 96)   288         conv2d_217[0][0]                 
__________________________________________________________________________________________________
activation_217 (Activation)     (None, 35, 35, 96)   0           batch_normalization_217[0][0]    
__________________________________________________________________________________________________
conv2d_215 (Conv2D)             (None, 17, 17, 384)  995328      mixed2[0][0]                     
__________________________________________________________________________________________________
conv2d_218 (Conv2D)             (None, 17, 17, 96)   82944       activation_217[0][0]             
__________________________________________________________________________________________________
batch_normalization_215 (BatchN (None, 17, 17, 384)  1152        conv2d_215[0][0]                 
__________________________________________________________________________________________________
batch_normalization_218 (BatchN (None, 17, 17, 96)   288         conv2d_218[0][0]                 
__________________________________________________________________________________________________
activation_215 (Activation)     (None, 17, 17, 384)  0           batch_normalization_215[0][0]    
__________________________________________________________________________________________________
activation_218 (Activation)     (None, 17, 17, 96)   0           batch_normalization_218[0][0]    
__________________________________________________________________________________________________
max_pooling2d_11 (MaxPooling2D) (None, 17, 17, 288)  0           mixed2[0][0]                     
__________________________________________________________________________________________________
mixed3 (Concatenate)            (None, 17, 17, 768)  0           activation_215[0][0]             
                                                                 activation_218[0][0]             
                                                                 max_pooling2d_11[0][0]           
__________________________________________________________________________________________________
conv2d_223 (Conv2D)             (None, 17, 17, 128)  98304       mixed3[0][0]                     
__________________________________________________________________________________________________
batch_normalization_223 (BatchN (None, 17, 17, 128)  384         conv2d_223[0][0]                 
__________________________________________________________________________________________________
activation_223 (Activation)     (None, 17, 17, 128)  0           batch_normalization_223[0][0]    
__________________________________________________________________________________________________
conv2d_224 (Conv2D)             (None, 17, 17, 128)  114688      activation_223[0][0]             
__________________________________________________________________________________________________
batch_normalization_224 (BatchN (None, 17, 17, 128)  384         conv2d_224[0][0]                 
__________________________________________________________________________________________________
activation_224 (Activation)     (None, 17, 17, 128)  0           batch_normalization_224[0][0]    
__________________________________________________________________________________________________
conv2d_220 (Conv2D)             (None, 17, 17, 128)  98304       mixed3[0][0]                     
__________________________________________________________________________________________________
conv2d_225 (Conv2D)             (None, 17, 17, 128)  114688      activation_224[0][0]             
__________________________________________________________________________________________________
batch_normalization_220 (BatchN (None, 17, 17, 128)  384         conv2d_220[0][0]                 
__________________________________________________________________________________________________
batch_normalization_225 (BatchN (None, 17, 17, 128)  384         conv2d_225[0][0]                 
__________________________________________________________________________________________________
activation_220 (Activation)     (None, 17, 17, 128)  0           batch_normalization_220[0][0]    
__________________________________________________________________________________________________
activation_225 (Activation)     (None, 17, 17, 128)  0           batch_normalization_225[0][0]    
__________________________________________________________________________________________________
conv2d_221 (Conv2D)             (None, 17, 17, 128)  114688      activation_220[0][0]             
__________________________________________________________________________________________________
conv2d_226 (Conv2D)             (None, 17, 17, 128)  114688      activation_225[0][0]             
__________________________________________________________________________________________________
batch_normalization_221 (BatchN (None, 17, 17, 128)  384         conv2d_221[0][0]                 
__________________________________________________________________________________________________
batch_normalization_226 (BatchN (None, 17, 17, 128)  384         conv2d_226[0][0]                 
__________________________________________________________________________________________________
activation_221 (Activation)     (None, 17, 17, 128)  0           batch_normalization_221[0][0]    
__________________________________________________________________________________________________
activation_226 (Activation)     (None, 17, 17, 128)  0           batch_normalization_226[0][0]    
__________________________________________________________________________________________________
average_pooling2d_22 (AveragePo (None, 17, 17, 768)  0           mixed3[0][0]                     
__________________________________________________________________________________________________
conv2d_219 (Conv2D)             (None, 17, 17, 192)  147456      mixed3[0][0]                     
__________________________________________________________________________________________________
conv2d_222 (Conv2D)             (None, 17, 17, 192)  172032      activation_221[0][0]             
__________________________________________________________________________________________________
conv2d_227 (Conv2D)             (None, 17, 17, 192)  172032      activation_226[0][0]             
__________________________________________________________________________________________________
conv2d_228 (Conv2D)             (None, 17, 17, 192)  147456      average_pooling2d_22[0][0]       
__________________________________________________________________________________________________
batch_normalization_219 (BatchN (None, 17, 17, 192)  576         conv2d_219[0][0]                 
__________________________________________________________________________________________________
batch_normalization_222 (BatchN (None, 17, 17, 192)  576         conv2d_222[0][0]                 
__________________________________________________________________________________________________
batch_normalization_227 (BatchN (None, 17, 17, 192)  576         conv2d_227[0][0]                 
__________________________________________________________________________________________________
batch_normalization_228 (BatchN (None, 17, 17, 192)  576         conv2d_228[0][0]                 
__________________________________________________________________________________________________
activation_219 (Activation)     (None, 17, 17, 192)  0           batch_normalization_219[0][0]    
__________________________________________________________________________________________________
activation_222 (Activation)     (None, 17, 17, 192)  0           batch_normalization_222[0][0]    
__________________________________________________________________________________________________
activation_227 (Activation)     (None, 17, 17, 192)  0           batch_normalization_227[0][0]    
__________________________________________________________________________________________________
activation_228 (Activation)     (None, 17, 17, 192)  0           batch_normalization_228[0][0]    
__________________________________________________________________________________________________
mixed4 (Concatenate)            (None, 17, 17, 768)  0           activation_219[0][0]             
                                                                 activation_222[0][0]             
                                                                 activation_227[0][0]             
                                                                 activation_228[0][0]             
__________________________________________________________________________________________________
conv2d_233 (Conv2D)             (None, 17, 17, 160)  122880      mixed4[0][0]                     
__________________________________________________________________________________________________
batch_normalization_233 (BatchN (None, 17, 17, 160)  480         conv2d_233[0][0]                 
__________________________________________________________________________________________________
activation_233 (Activation)     (None, 17, 17, 160)  0           batch_normalization_233[0][0]    
__________________________________________________________________________________________________
conv2d_234 (Conv2D)             (None, 17, 17, 160)  179200      activation_233[0][0]             
__________________________________________________________________________________________________
batch_normalization_234 (BatchN (None, 17, 17, 160)  480         conv2d_234[0][0]                 
__________________________________________________________________________________________________
activation_234 (Activation)     (None, 17, 17, 160)  0           batch_normalization_234[0][0]    
__________________________________________________________________________________________________
conv2d_230 (Conv2D)             (None, 17, 17, 160)  122880      mixed4[0][0]                     
__________________________________________________________________________________________________
conv2d_235 (Conv2D)             (None, 17, 17, 160)  179200      activation_234[0][0]             
__________________________________________________________________________________________________
batch_normalization_230 (BatchN (None, 17, 17, 160)  480         conv2d_230[0][0]                 
__________________________________________________________________________________________________
batch_normalization_235 (BatchN (None, 17, 17, 160)  480         conv2d_235[0][0]                 
__________________________________________________________________________________________________
activation_230 (Activation)     (None, 17, 17, 160)  0           batch_normalization_230[0][0]    
__________________________________________________________________________________________________
activation_235 (Activation)     (None, 17, 17, 160)  0           batch_normalization_235[0][0]    
__________________________________________________________________________________________________
conv2d_231 (Conv2D)             (None, 17, 17, 160)  179200      activation_230[0][0]             
__________________________________________________________________________________________________
conv2d_236 (Conv2D)             (None, 17, 17, 160)  179200      activation_235[0][0]             
__________________________________________________________________________________________________
batch_normalization_231 (BatchN (None, 17, 17, 160)  480         conv2d_231[0][0]                 
__________________________________________________________________________________________________
batch_normalization_236 (BatchN (None, 17, 17, 160)  480         conv2d_236[0][0]                 
__________________________________________________________________________________________________
activation_231 (Activation)     (None, 17, 17, 160)  0           batch_normalization_231[0][0]    
__________________________________________________________________________________________________
activation_236 (Activation)     (None, 17, 17, 160)  0           batch_normalization_236[0][0]    
__________________________________________________________________________________________________
average_pooling2d_23 (AveragePo (None, 17, 17, 768)  0           mixed4[0][0]                     
__________________________________________________________________________________________________
conv2d_229 (Conv2D)             (None, 17, 17, 192)  147456      mixed4[0][0]                     
__________________________________________________________________________________________________
conv2d_232 (Conv2D)             (None, 17, 17, 192)  215040      activation_231[0][0]             
__________________________________________________________________________________________________
conv2d_237 (Conv2D)             (None, 17, 17, 192)  215040      activation_236[0][0]             
__________________________________________________________________________________________________
conv2d_238 (Conv2D)             (None, 17, 17, 192)  147456      average_pooling2d_23[0][0]       
__________________________________________________________________________________________________
batch_normalization_229 (BatchN (None, 17, 17, 192)  576         conv2d_229[0][0]                 
__________________________________________________________________________________________________
batch_normalization_232 (BatchN (None, 17, 17, 192)  576         conv2d_232[0][0]                 
__________________________________________________________________________________________________
batch_normalization_237 (BatchN (None, 17, 17, 192)  576         conv2d_237[0][0]                 
__________________________________________________________________________________________________
batch_normalization_238 (BatchN (None, 17, 17, 192)  576         conv2d_238[0][0]                 
__________________________________________________________________________________________________
activation_229 (Activation)     (None, 17, 17, 192)  0           batch_normalization_229[0][0]    
__________________________________________________________________________________________________
activation_232 (Activation)     (None, 17, 17, 192)  0           batch_normalization_232[0][0]    
__________________________________________________________________________________________________
activation_237 (Activation)     (None, 17, 17, 192)  0           batch_normalization_237[0][0]    
__________________________________________________________________________________________________
activation_238 (Activation)     (None, 17, 17, 192)  0           batch_normalization_238[0][0]    
__________________________________________________________________________________________________
mixed5 (Concatenate)            (None, 17, 17, 768)  0           activation_229[0][0]             
                                                                 activation_232[0][0]             
                                                                 activation_237[0][0]             
                                                                 activation_238[0][0]             
__________________________________________________________________________________________________
conv2d_243 (Conv2D)             (None, 17, 17, 160)  122880      mixed5[0][0]                     
__________________________________________________________________________________________________
batch_normalization_243 (BatchN (None, 17, 17, 160)  480         conv2d_243[0][0]                 
__________________________________________________________________________________________________
activation_243 (Activation)     (None, 17, 17, 160)  0           batch_normalization_243[0][0]    
__________________________________________________________________________________________________
conv2d_244 (Conv2D)             (None, 17, 17, 160)  179200      activation_243[0][0]             
__________________________________________________________________________________________________
batch_normalization_244 (BatchN (None, 17, 17, 160)  480         conv2d_244[0][0]                 
__________________________________________________________________________________________________
activation_244 (Activation)     (None, 17, 17, 160)  0           batch_normalization_244[0][0]    
__________________________________________________________________________________________________
conv2d_240 (Conv2D)             (None, 17, 17, 160)  122880      mixed5[0][0]                     
__________________________________________________________________________________________________
conv2d_245 (Conv2D)             (None, 17, 17, 160)  179200      activation_244[0][0]             
__________________________________________________________________________________________________
batch_normalization_240 (BatchN (None, 17, 17, 160)  480         conv2d_240[0][0]                 
__________________________________________________________________________________________________
batch_normalization_245 (BatchN (None, 17, 17, 160)  480         conv2d_245[0][0]                 
__________________________________________________________________________________________________
activation_240 (Activation)     (None, 17, 17, 160)  0           batch_normalization_240[0][0]    
__________________________________________________________________________________________________
activation_245 (Activation)     (None, 17, 17, 160)  0           batch_normalization_245[0][0]    
__________________________________________________________________________________________________
conv2d_241 (Conv2D)             (None, 17, 17, 160)  179200      activation_240[0][0]             
__________________________________________________________________________________________________
conv2d_246 (Conv2D)             (None, 17, 17, 160)  179200      activation_245[0][0]             
__________________________________________________________________________________________________
batch_normalization_241 (BatchN (None, 17, 17, 160)  480         conv2d_241[0][0]                 
__________________________________________________________________________________________________
batch_normalization_246 (BatchN (None, 17, 17, 160)  480         conv2d_246[0][0]                 
__________________________________________________________________________________________________
activation_241 (Activation)     (None, 17, 17, 160)  0           batch_normalization_241[0][0]    
__________________________________________________________________________________________________
activation_246 (Activation)     (None, 17, 17, 160)  0           batch_normalization_246[0][0]    
__________________________________________________________________________________________________
average_pooling2d_24 (AveragePo (None, 17, 17, 768)  0           mixed5[0][0]                     
__________________________________________________________________________________________________
conv2d_239 (Conv2D)             (None, 17, 17, 192)  147456      mixed5[0][0]                     
__________________________________________________________________________________________________
conv2d_242 (Conv2D)             (None, 17, 17, 192)  215040      activation_241[0][0]             
__________________________________________________________________________________________________
conv2d_247 (Conv2D)             (None, 17, 17, 192)  215040      activation_246[0][0]             
__________________________________________________________________________________________________
conv2d_248 (Conv2D)             (None, 17, 17, 192)  147456      average_pooling2d_24[0][0]       
__________________________________________________________________________________________________
batch_normalization_239 (BatchN (None, 17, 17, 192)  576         conv2d_239[0][0]                 
__________________________________________________________________________________________________
batch_normalization_242 (BatchN (None, 17, 17, 192)  576         conv2d_242[0][0]                 
__________________________________________________________________________________________________
batch_normalization_247 (BatchN (None, 17, 17, 192)  576         conv2d_247[0][0]                 
__________________________________________________________________________________________________
batch_normalization_248 (BatchN (None, 17, 17, 192)  576         conv2d_248[0][0]                 
__________________________________________________________________________________________________
activation_239 (Activation)     (None, 17, 17, 192)  0           batch_normalization_239[0][0]    
__________________________________________________________________________________________________
activation_242 (Activation)     (None, 17, 17, 192)  0           batch_normalization_242[0][0]    
__________________________________________________________________________________________________
activation_247 (Activation)     (None, 17, 17, 192)  0           batch_normalization_247[0][0]    
__________________________________________________________________________________________________
activation_248 (Activation)     (None, 17, 17, 192)  0           batch_normalization_248[0][0]    
__________________________________________________________________________________________________
mixed6 (Concatenate)            (None, 17, 17, 768)  0           activation_239[0][0]             
                                                                 activation_242[0][0]             
                                                                 activation_247[0][0]             
                                                                 activation_248[0][0]             
__________________________________________________________________________________________________
conv2d_253 (Conv2D)             (None, 17, 17, 192)  147456      mixed6[0][0]                     
__________________________________________________________________________________________________
batch_normalization_253 (BatchN (None, 17, 17, 192)  576         conv2d_253[0][0]                 
__________________________________________________________________________________________________
activation_253 (Activation)     (None, 17, 17, 192)  0           batch_normalization_253[0][0]    
__________________________________________________________________________________________________
conv2d_254 (Conv2D)             (None, 17, 17, 192)  258048      activation_253[0][0]             
__________________________________________________________________________________________________
batch_normalization_254 (BatchN (None, 17, 17, 192)  576         conv2d_254[0][0]                 
__________________________________________________________________________________________________
activation_254 (Activation)     (None, 17, 17, 192)  0           batch_normalization_254[0][0]    
__________________________________________________________________________________________________
conv2d_250 (Conv2D)             (None, 17, 17, 192)  147456      mixed6[0][0]                     
__________________________________________________________________________________________________
conv2d_255 (Conv2D)             (None, 17, 17, 192)  258048      activation_254[0][0]             
__________________________________________________________________________________________________
batch_normalization_250 (BatchN (None, 17, 17, 192)  576         conv2d_250[0][0]                 
__________________________________________________________________________________________________
batch_normalization_255 (BatchN (None, 17, 17, 192)  576         conv2d_255[0][0]                 
__________________________________________________________________________________________________
activation_250 (Activation)     (None, 17, 17, 192)  0           batch_normalization_250[0][0]    
__________________________________________________________________________________________________
activation_255 (Activation)     (None, 17, 17, 192)  0           batch_normalization_255[0][0]    
__________________________________________________________________________________________________
conv2d_251 (Conv2D)             (None, 17, 17, 192)  258048      activation_250[0][0]             
__________________________________________________________________________________________________
conv2d_256 (Conv2D)             (None, 17, 17, 192)  258048      activation_255[0][0]             
__________________________________________________________________________________________________
batch_normalization_251 (BatchN (None, 17, 17, 192)  576         conv2d_251[0][0]                 
__________________________________________________________________________________________________
batch_normalization_256 (BatchN (None, 17, 17, 192)  576         conv2d_256[0][0]                 
__________________________________________________________________________________________________
activation_251 (Activation)     (None, 17, 17, 192)  0           batch_normalization_251[0][0]    
__________________________________________________________________________________________________
activation_256 (Activation)     (None, 17, 17, 192)  0           batch_normalization_256[0][0]    
__________________________________________________________________________________________________
average_pooling2d_25 (AveragePo (None, 17, 17, 768)  0           mixed6[0][0]                     
__________________________________________________________________________________________________
conv2d_249 (Conv2D)             (None, 17, 17, 192)  147456      mixed6[0][0]                     
__________________________________________________________________________________________________
conv2d_252 (Conv2D)             (None, 17, 17, 192)  258048      activation_251[0][0]             
__________________________________________________________________________________________________
conv2d_257 (Conv2D)             (None, 17, 17, 192)  258048      activation_256[0][0]             
__________________________________________________________________________________________________
conv2d_258 (Conv2D)             (None, 17, 17, 192)  147456      average_pooling2d_25[0][0]       
__________________________________________________________________________________________________
batch_normalization_249 (BatchN (None, 17, 17, 192)  576         conv2d_249[0][0]                 
__________________________________________________________________________________________________
batch_normalization_252 (BatchN (None, 17, 17, 192)  576         conv2d_252[0][0]                 
__________________________________________________________________________________________________
batch_normalization_257 (BatchN (None, 17, 17, 192)  576         conv2d_257[0][0]                 
__________________________________________________________________________________________________
batch_normalization_258 (BatchN (None, 17, 17, 192)  576         conv2d_258[0][0]                 
__________________________________________________________________________________________________
activation_249 (Activation)     (None, 17, 17, 192)  0           batch_normalization_249[0][0]    
__________________________________________________________________________________________________
activation_252 (Activation)     (None, 17, 17, 192)  0           batch_normalization_252[0][0]    
__________________________________________________________________________________________________
activation_257 (Activation)     (None, 17, 17, 192)  0           batch_normalization_257[0][0]    
__________________________________________________________________________________________________
activation_258 (Activation)     (None, 17, 17, 192)  0           batch_normalization_258[0][0]    
__________________________________________________________________________________________________
mixed7 (Concatenate)            (None, 17, 17, 768)  0           activation_249[0][0]             
                                                                 activation_252[0][0]             
                                                                 activation_257[0][0]             
                                                                 activation_258[0][0]             
__________________________________________________________________________________________________
conv2d_261 (Conv2D)             (None, 17, 17, 192)  147456      mixed7[0][0]                     
__________________________________________________________________________________________________
batch_normalization_261 (BatchN (None, 17, 17, 192)  576         conv2d_261[0][0]                 
__________________________________________________________________________________________________
activation_261 (Activation)     (None, 17, 17, 192)  0           batch_normalization_261[0][0]    
__________________________________________________________________________________________________
conv2d_262 (Conv2D)             (None, 17, 17, 192)  258048      activation_261[0][0]             
__________________________________________________________________________________________________
batch_normalization_262 (BatchN (None, 17, 17, 192)  576         conv2d_262[0][0]                 
__________________________________________________________________________________________________
activation_262 (Activation)     (None, 17, 17, 192)  0           batch_normalization_262[0][0]    
__________________________________________________________________________________________________
conv2d_259 (Conv2D)             (None, 17, 17, 192)  147456      mixed7[0][0]                     
__________________________________________________________________________________________________
conv2d_263 (Conv2D)             (None, 17, 17, 192)  258048      activation_262[0][0]             
__________________________________________________________________________________________________
batch_normalization_259 (BatchN (None, 17, 17, 192)  576         conv2d_259[0][0]                 
__________________________________________________________________________________________________
batch_normalization_263 (BatchN (None, 17, 17, 192)  576         conv2d_263[0][0]                 
__________________________________________________________________________________________________
activation_259 (Activation)     (None, 17, 17, 192)  0           batch_normalization_259[0][0]    
__________________________________________________________________________________________________
activation_263 (Activation)     (None, 17, 17, 192)  0           batch_normalization_263[0][0]    
__________________________________________________________________________________________________
conv2d_260 (Conv2D)             (None, 8, 8, 320)    552960      activation_259[0][0]             
__________________________________________________________________________________________________
conv2d_264 (Conv2D)             (None, 8, 8, 192)    331776      activation_263[0][0]             
__________________________________________________________________________________________________
batch_normalization_260 (BatchN (None, 8, 8, 320)    960         conv2d_260[0][0]                 
__________________________________________________________________________________________________
batch_normalization_264 (BatchN (None, 8, 8, 192)    576         conv2d_264[0][0]                 
__________________________________________________________________________________________________
activation_260 (Activation)     (None, 8, 8, 320)    0           batch_normalization_260[0][0]    
__________________________________________________________________________________________________
activation_264 (Activation)     (None, 8, 8, 192)    0           batch_normalization_264[0][0]    
__________________________________________________________________________________________________
max_pooling2d_12 (MaxPooling2D) (None, 8, 8, 768)    0           mixed7[0][0]                     
__________________________________________________________________________________________________
mixed8 (Concatenate)            (None, 8, 8, 1280)   0           activation_260[0][0]             
                                                                 activation_264[0][0]             
                                                                 max_pooling2d_12[0][0]           
__________________________________________________________________________________________________
conv2d_269 (Conv2D)             (None, 8, 8, 448)    573440      mixed8[0][0]                     
__________________________________________________________________________________________________
batch_normalization_269 (BatchN (None, 8, 8, 448)    1344        conv2d_269[0][0]                 
__________________________________________________________________________________________________
activation_269 (Activation)     (None, 8, 8, 448)    0           batch_normalization_269[0][0]    
__________________________________________________________________________________________________
conv2d_266 (Conv2D)             (None, 8, 8, 384)    491520      mixed8[0][0]                     
__________________________________________________________________________________________________
conv2d_270 (Conv2D)             (None, 8, 8, 384)    1548288     activation_269[0][0]             
__________________________________________________________________________________________________
batch_normalization_266 (BatchN (None, 8, 8, 384)    1152        conv2d_266[0][0]                 
__________________________________________________________________________________________________
batch_normalization_270 (BatchN (None, 8, 8, 384)    1152        conv2d_270[0][0]                 
__________________________________________________________________________________________________
activation_266 (Activation)     (None, 8, 8, 384)    0           batch_normalization_266[0][0]    
__________________________________________________________________________________________________
activation_270 (Activation)     (None, 8, 8, 384)    0           batch_normalization_270[0][0]    
__________________________________________________________________________________________________
conv2d_267 (Conv2D)             (None, 8, 8, 384)    442368      activation_266[0][0]             
__________________________________________________________________________________________________
conv2d_268 (Conv2D)             (None, 8, 8, 384)    442368      activation_266[0][0]             
__________________________________________________________________________________________________
conv2d_271 (Conv2D)             (None, 8, 8, 384)    442368      activation_270[0][0]             
__________________________________________________________________________________________________
conv2d_272 (Conv2D)             (None, 8, 8, 384)    442368      activation_270[0][0]             
__________________________________________________________________________________________________
average_pooling2d_26 (AveragePo (None, 8, 8, 1280)   0           mixed8[0][0]                     
__________________________________________________________________________________________________
conv2d_265 (Conv2D)             (None, 8, 8, 320)    409600      mixed8[0][0]                     
__________________________________________________________________________________________________
batch_normalization_267 (BatchN (None, 8, 8, 384)    1152        conv2d_267[0][0]                 
__________________________________________________________________________________________________
batch_normalization_268 (BatchN (None, 8, 8, 384)    1152        conv2d_268[0][0]                 
__________________________________________________________________________________________________
batch_normalization_271 (BatchN (None, 8, 8, 384)    1152        conv2d_271[0][0]                 
__________________________________________________________________________________________________
batch_normalization_272 (BatchN (None, 8, 8, 384)    1152        conv2d_272[0][0]                 
__________________________________________________________________________________________________
conv2d_273 (Conv2D)             (None, 8, 8, 192)    245760      average_pooling2d_26[0][0]       
__________________________________________________________________________________________________
batch_normalization_265 (BatchN (None, 8, 8, 320)    960         conv2d_265[0][0]                 
__________________________________________________________________________________________________
activation_267 (Activation)     (None, 8, 8, 384)    0           batch_normalization_267[0][0]    
__________________________________________________________________________________________________
activation_268 (Activation)     (None, 8, 8, 384)    0           batch_normalization_268[0][0]    
__________________________________________________________________________________________________
activation_271 (Activation)     (None, 8, 8, 384)    0           batch_normalization_271[0][0]    
__________________________________________________________________________________________________
activation_272 (Activation)     (None, 8, 8, 384)    0           batch_normalization_272[0][0]    
__________________________________________________________________________________________________
batch_normalization_273 (BatchN (None, 8, 8, 192)    576         conv2d_273[0][0]                 
__________________________________________________________________________________________________
activation_265 (Activation)     (None, 8, 8, 320)    0           batch_normalization_265[0][0]    
__________________________________________________________________________________________________
mixed9_0 (Concatenate)          (None, 8, 8, 768)    0           activation_267[0][0]             
                                                                 activation_268[0][0]             
__________________________________________________________________________________________________
concatenate_5 (Concatenate)     (None, 8, 8, 768)    0           activation_271[0][0]             
                                                                 activation_272[0][0]             
__________________________________________________________________________________________________
activation_273 (Activation)     (None, 8, 8, 192)    0           batch_normalization_273[0][0]    
__________________________________________________________________________________________________
mixed9 (Concatenate)            (None, 8, 8, 2048)   0           activation_265[0][0]             
                                                                 mixed9_0[0][0]                   
                                                                 concatenate_5[0][0]              
                                                                 activation_273[0][0]             
__________________________________________________________________________________________________
conv2d_278 (Conv2D)             (None, 8, 8, 448)    917504      mixed9[0][0]                     
__________________________________________________________________________________________________
batch_normalization_278 (BatchN (None, 8, 8, 448)    1344        conv2d_278[0][0]                 
__________________________________________________________________________________________________
activation_278 (Activation)     (None, 8, 8, 448)    0           batch_normalization_278[0][0]    
__________________________________________________________________________________________________
conv2d_275 (Conv2D)             (None, 8, 8, 384)    786432      mixed9[0][0]                     
__________________________________________________________________________________________________
conv2d_279 (Conv2D)             (None, 8, 8, 384)    1548288     activation_278[0][0]             
__________________________________________________________________________________________________
batch_normalization_275 (BatchN (None, 8, 8, 384)    1152        conv2d_275[0][0]                 
__________________________________________________________________________________________________
batch_normalization_279 (BatchN (None, 8, 8, 384)    1152        conv2d_279[0][0]                 
__________________________________________________________________________________________________
activation_275 (Activation)     (None, 8, 8, 384)    0           batch_normalization_275[0][0]    
__________________________________________________________________________________________________
activation_279 (Activation)     (None, 8, 8, 384)    0           batch_normalization_279[0][0]    
__________________________________________________________________________________________________
conv2d_276 (Conv2D)             (None, 8, 8, 384)    442368      activation_275[0][0]             
__________________________________________________________________________________________________
conv2d_277 (Conv2D)             (None, 8, 8, 384)    442368      activation_275[0][0]             
__________________________________________________________________________________________________
conv2d_280 (Conv2D)             (None, 8, 8, 384)    442368      activation_279[0][0]             
__________________________________________________________________________________________________
conv2d_281 (Conv2D)             (None, 8, 8, 384)    442368      activation_279[0][0]             
__________________________________________________________________________________________________
average_pooling2d_27 (AveragePo (None, 8, 8, 2048)   0           mixed9[0][0]                     
__________________________________________________________________________________________________
conv2d_274 (Conv2D)             (None, 8, 8, 320)    655360      mixed9[0][0]                     
__________________________________________________________________________________________________
batch_normalization_276 (BatchN (None, 8, 8, 384)    1152        conv2d_276[0][0]                 
__________________________________________________________________________________________________
batch_normalization_277 (BatchN (None, 8, 8, 384)    1152        conv2d_277[0][0]                 
__________________________________________________________________________________________________
batch_normalization_280 (BatchN (None, 8, 8, 384)    1152        conv2d_280[0][0]                 
__________________________________________________________________________________________________
batch_normalization_281 (BatchN (None, 8, 8, 384)    1152        conv2d_281[0][0]                 
__________________________________________________________________________________________________
conv2d_282 (Conv2D)             (None, 8, 8, 192)    393216      average_pooling2d_27[0][0]       
__________________________________________________________________________________________________
batch_normalization_274 (BatchN (None, 8, 8, 320)    960         conv2d_274[0][0]                 
__________________________________________________________________________________________________
activation_276 (Activation)     (None, 8, 8, 384)    0           batch_normalization_276[0][0]    
__________________________________________________________________________________________________
activation_277 (Activation)     (None, 8, 8, 384)    0           batch_normalization_277[0][0]    
__________________________________________________________________________________________________
activation_280 (Activation)     (None, 8, 8, 384)    0           batch_normalization_280[0][0]    
__________________________________________________________________________________________________
activation_281 (Activation)     (None, 8, 8, 384)    0           batch_normalization_281[0][0]    
__________________________________________________________________________________________________
batch_normalization_282 (BatchN (None, 8, 8, 192)    576         conv2d_282[0][0]                 
__________________________________________________________________________________________________
activation_274 (Activation)     (None, 8, 8, 320)    0           batch_normalization_274[0][0]    
__________________________________________________________________________________________________
mixed9_1 (Concatenate)          (None, 8, 8, 768)    0           activation_276[0][0]             
                                                                 activation_277[0][0]             
__________________________________________________________________________________________________
concatenate_6 (Concatenate)     (None, 8, 8, 768)    0           activation_280[0][0]             
                                                                 activation_281[0][0]             
__________________________________________________________________________________________________
activation_282 (Activation)     (None, 8, 8, 192)    0           batch_normalization_282[0][0]    
__________________________________________________________________________________________________
mixed10 (Concatenate)           (None, 8, 8, 2048)   0           activation_274[0][0]             
                                                                 mixed9_1[0][0]                   
                                                                 concatenate_6[0][0]              
                                                                 activation_282[0][0]             
__________________________________________________________________________________________________
global_average_pooling2d_3 (Glo (None, 2048)         0           mixed10[0][0]                    
__________________________________________________________________________________________________
dropout_3 (Dropout)             (None, 2048)         0           global_average_pooling2d_3[0][0] 
__________________________________________________________________________________________________
dense_3 (Dense)                 (None, 1)            2049        dropout_3[0][0]                  
==================================================================================================
Total params: 21,804,833
Trainable params: 2,049
Non-trainable params: 21,802,784
__________________________________________________________________________________________________
In [11]:
# Stop training once validation loss has not improved for 5 consecutive epochs.
earlystop = EarlyStopping(monitor='val_loss', patience=5, verbose=0, mode='auto')

# Binary (dog vs cat) head on the frozen InceptionV3 base; 'Adam' string uses
# Keras' default Adam hyperparameters.
model_InceptionV3.compile(loss = 'binary_crossentropy', optimizer = 'Adam', metrics =['accuracy'])
# Train from python generators; steps_per_epoch/validation_steps drop the tail
# partial batch (len(y)//batch_size). Relies on train_generator/val_generator,
# X_train/y_train, X_val/y_val, batch_size and epochs defined in earlier cells.
model_InceptionV3_history = model_InceptionV3.fit_generator(train_generator(X_train, y_train, batch_size=batch_size),
                                                    steps_per_epoch=len(y_train)//batch_size, epochs=epochs,
                                                    validation_data = val_generator(X_val, y_val, batch_size=batch_size),
                                                    validation_steps=len(y_val)//batch_size,
                                                    callbacks = [earlystop],
                                                    verbose=2)
Epoch 1/10
 - 240s - loss: 0.2065 - acc: 0.9227 - val_loss: 0.0825 - val_acc: 0.9724
Epoch 2/10
 - 240s - loss: 0.1164 - acc: 0.9567 - val_loss: 0.0743 - val_acc: 0.9754
Epoch 3/10
 - 240s - loss: 0.1058 - acc: 0.9597 - val_loss: 0.0787 - val_acc: 0.9742
Epoch 4/10
 - 240s - loss: 0.1031 - acc: 0.9617 - val_loss: 0.0782 - val_acc: 0.9760
Epoch 5/10
 - 240s - loss: 0.0967 - acc: 0.9637 - val_loss: 0.0736 - val_acc: 0.9778
Epoch 6/10
 - 240s - loss: 0.0939 - acc: 0.9636 - val_loss: 0.0654 - val_acc: 0.9810
Epoch 7/10
 - 240s - loss: 0.0945 - acc: 0.9646 - val_loss: 0.0840 - val_acc: 0.9744
Epoch 8/10
 - 240s - loss: 0.0938 - acc: 0.9648 - val_loss: 0.0822 - val_acc: 0.9756
Epoch 9/10
 - 240s - loss: 0.0914 - acc: 0.9663 - val_loss: 0.0721 - val_acc: 0.9788
Epoch 10/10
 - 240s - loss: 0.0912 - acc: 0.9648 - val_loss: 0.0848 - val_acc: 0.9752
In [35]:
from matplotlib import pyplot as plt

history = model_InceptionV3_history

# Summarize history for accuracy.
# NOTE: the curves are train vs *validation* (val_acc from the fit split),
# so the legend says 'validation' — the previous label 'test' was wrong.
plt.plot(history.history['acc'])
plt.plot(history.history['val_acc'])
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.grid()
plt.legend(['train', 'validation'], loc='lower right')
plt.show()

# Summarize history for loss (same train vs validation split).
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.grid()
plt.legend(['train', 'validation'], loc='upper right')
plt.show()
In [14]:
# define train_generator function, yield batchs of train and validation data 
def train_gen_224(X, y, batch_size):
    """Infinite generator of (images, labels) training batches resized to 224x224.

    Parameters
    ----------
    X : sequence of image file paths.
    y : labels aligned with X (indexable, sliceable).
    batch_size : number of samples per yielded batch.

    Yields preprocessed float batches of shape (batch_size, 224, 224, 3).
    Samples beyond the last full batch (len(y) % batch_size) are never yielded.
    """
    n_batches = len(y) // batch_size
    while True:
        for i in range(n_batches):
            # np.float was removed from NumPy (1.24+); the builtin float is the
            # same dtype (float64), so behavior is unchanged.
            X_224 = np.zeros((batch_size, 224, 224, 3), dtype=float)
            for j in range(batch_size):
                # NOTE(review): cv2.imread returns BGR while Keras'
                # preprocess_input expects RGB input — confirm the channel
                # order matches how the model was trained.
                X_224[j] = cv2.resize(cv2.imread(X[i * batch_size + j]), (224, 224))

            X_224 = resnet_input(X_224)
            yield X_224, y[i * batch_size:(i + 1) * batch_size]

def val_gen_224(X, y, batch_size):
    """Infinite generator of (images, labels) validation batches resized to 224x224.

    Parameters
    ----------
    X : sequence of image file paths.
    y : labels aligned with X (indexable, sliceable).
    batch_size : number of samples per yielded batch.

    Yields preprocessed float batches of shape (batch_size, 224, 224, 3).
    Samples beyond the last full batch (len(y) % batch_size) are never yielded.
    Identical pipeline to train_gen_224 (no augmentation is applied in either).
    """
    n_batches = len(y) // batch_size
    while True:
        for i in range(n_batches):
            # np.float was removed from NumPy (1.24+); the builtin float is the
            # same dtype (float64), so behavior is unchanged.
            X_224 = np.zeros((batch_size, 224, 224, 3), dtype=float)
            for j in range(batch_size):
                # NOTE(review): cv2.imread returns BGR while Keras'
                # preprocess_input expects RGB input — confirm the channel
                # order matches how the model was trained.
                X_224[j] = cv2.resize(cv2.imread(X[i * batch_size + j]), (224, 224))

            X_224 = resnet_input(X_224)
            yield X_224, y[i * batch_size:(i + 1) * batch_size]
In [15]:
# Transfer learning: ResNet50 convolutional base pretrained on ImageNet,
# global-average-pooled, with a dropout + single-sigmoid head for dog vs cat.
base_model = ResNet50(include_top=False, weights='imagenet', input_shape=(224,224,3),pooling='avg')

# Freeze every pretrained layer so only the new classification head is trained.
for frozen_layer in base_model.layers:
    frozen_layer.trainable = False

head = Dropout(0.5)(base_model.output)
head = Dense(1, activation='sigmoid')(head)

model_ResNet50 = Model(inputs = base_model.input, outputs=head, name = 'model_ResNet50')
model_ResNet50.summary()
__________________________________________________________________________________________________
Layer (type)                    Output Shape         Param #     Connected to                     
==================================================================================================
input_4 (InputLayer)            (None, 224, 224, 3)  0                                            
__________________________________________________________________________________________________
conv1_pad (ZeroPadding2D)       (None, 230, 230, 3)  0           input_4[0][0]                    
__________________________________________________________________________________________________
conv1 (Conv2D)                  (None, 112, 112, 64) 9472        conv1_pad[0][0]                  
__________________________________________________________________________________________________
bn_conv1 (BatchNormalization)   (None, 112, 112, 64) 256         conv1[0][0]                      
__________________________________________________________________________________________________
activation_283 (Activation)     (None, 112, 112, 64) 0           bn_conv1[0][0]                   
__________________________________________________________________________________________________
max_pooling2d_13 (MaxPooling2D) (None, 55, 55, 64)   0           activation_283[0][0]             
__________________________________________________________________________________________________
res2a_branch2a (Conv2D)         (None, 55, 55, 64)   4160        max_pooling2d_13[0][0]           
__________________________________________________________________________________________________
bn2a_branch2a (BatchNormalizati (None, 55, 55, 64)   256         res2a_branch2a[0][0]             
__________________________________________________________________________________________________
activation_284 (Activation)     (None, 55, 55, 64)   0           bn2a_branch2a[0][0]              
__________________________________________________________________________________________________
res2a_branch2b (Conv2D)         (None, 55, 55, 64)   36928       activation_284[0][0]             
__________________________________________________________________________________________________
bn2a_branch2b (BatchNormalizati (None, 55, 55, 64)   256         res2a_branch2b[0][0]             
__________________________________________________________________________________________________
activation_285 (Activation)     (None, 55, 55, 64)   0           bn2a_branch2b[0][0]              
__________________________________________________________________________________________________
res2a_branch2c (Conv2D)         (None, 55, 55, 256)  16640       activation_285[0][0]             
__________________________________________________________________________________________________
res2a_branch1 (Conv2D)          (None, 55, 55, 256)  16640       max_pooling2d_13[0][0]           
__________________________________________________________________________________________________
bn2a_branch2c (BatchNormalizati (None, 55, 55, 256)  1024        res2a_branch2c[0][0]             
__________________________________________________________________________________________________
bn2a_branch1 (BatchNormalizatio (None, 55, 55, 256)  1024        res2a_branch1[0][0]              
__________________________________________________________________________________________________
add_1 (Add)                     (None, 55, 55, 256)  0           bn2a_branch2c[0][0]              
                                                                 bn2a_branch1[0][0]               
__________________________________________________________________________________________________
activation_286 (Activation)     (None, 55, 55, 256)  0           add_1[0][0]                      
__________________________________________________________________________________________________
res2b_branch2a (Conv2D)         (None, 55, 55, 64)   16448       activation_286[0][0]             
__________________________________________________________________________________________________
bn2b_branch2a (BatchNormalizati (None, 55, 55, 64)   256         res2b_branch2a[0][0]             
__________________________________________________________________________________________________
activation_287 (Activation)     (None, 55, 55, 64)   0           bn2b_branch2a[0][0]              
__________________________________________________________________________________________________
res2b_branch2b (Conv2D)         (None, 55, 55, 64)   36928       activation_287[0][0]             
__________________________________________________________________________________________________
bn2b_branch2b (BatchNormalizati (None, 55, 55, 64)   256         res2b_branch2b[0][0]             
__________________________________________________________________________________________________
activation_288 (Activation)     (None, 55, 55, 64)   0           bn2b_branch2b[0][0]              
__________________________________________________________________________________________________
res2b_branch2c (Conv2D)         (None, 55, 55, 256)  16640       activation_288[0][0]             
__________________________________________________________________________________________________
bn2b_branch2c (BatchNormalizati (None, 55, 55, 256)  1024        res2b_branch2c[0][0]             
__________________________________________________________________________________________________
add_2 (Add)                     (None, 55, 55, 256)  0           bn2b_branch2c[0][0]              
                                                                 activation_286[0][0]             
__________________________________________________________________________________________________
activation_289 (Activation)     (None, 55, 55, 256)  0           add_2[0][0]                      
__________________________________________________________________________________________________
res2c_branch2a (Conv2D)         (None, 55, 55, 64)   16448       activation_289[0][0]             
__________________________________________________________________________________________________
bn2c_branch2a (BatchNormalizati (None, 55, 55, 64)   256         res2c_branch2a[0][0]             
__________________________________________________________________________________________________
activation_290 (Activation)     (None, 55, 55, 64)   0           bn2c_branch2a[0][0]              
__________________________________________________________________________________________________
res2c_branch2b (Conv2D)         (None, 55, 55, 64)   36928       activation_290[0][0]             
__________________________________________________________________________________________________
bn2c_branch2b (BatchNormalizati (None, 55, 55, 64)   256         res2c_branch2b[0][0]             
__________________________________________________________________________________________________
activation_291 (Activation)     (None, 55, 55, 64)   0           bn2c_branch2b[0][0]              
__________________________________________________________________________________________________
res2c_branch2c (Conv2D)         (None, 55, 55, 256)  16640       activation_291[0][0]             
__________________________________________________________________________________________________
bn2c_branch2c (BatchNormalizati (None, 55, 55, 256)  1024        res2c_branch2c[0][0]             
__________________________________________________________________________________________________
add_3 (Add)                     (None, 55, 55, 256)  0           bn2c_branch2c[0][0]              
                                                                 activation_289[0][0]             
__________________________________________________________________________________________________
activation_292 (Activation)     (None, 55, 55, 256)  0           add_3[0][0]                      
__________________________________________________________________________________________________
res3a_branch2a (Conv2D)         (None, 28, 28, 128)  32896       activation_292[0][0]             
__________________________________________________________________________________________________
bn3a_branch2a (BatchNormalizati (None, 28, 28, 128)  512         res3a_branch2a[0][0]             
__________________________________________________________________________________________________
activation_293 (Activation)     (None, 28, 28, 128)  0           bn3a_branch2a[0][0]              
__________________________________________________________________________________________________
res3a_branch2b (Conv2D)         (None, 28, 28, 128)  147584      activation_293[0][0]             
__________________________________________________________________________________________________
bn3a_branch2b (BatchNormalizati (None, 28, 28, 128)  512         res3a_branch2b[0][0]             
__________________________________________________________________________________________________
activation_294 (Activation)     (None, 28, 28, 128)  0           bn3a_branch2b[0][0]              
__________________________________________________________________________________________________
res3a_branch2c (Conv2D)         (None, 28, 28, 512)  66048       activation_294[0][0]             
__________________________________________________________________________________________________
res3a_branch1 (Conv2D)          (None, 28, 28, 512)  131584      activation_292[0][0]             
__________________________________________________________________________________________________
bn3a_branch2c (BatchNormalizati (None, 28, 28, 512)  2048        res3a_branch2c[0][0]             
__________________________________________________________________________________________________
bn3a_branch1 (BatchNormalizatio (None, 28, 28, 512)  2048        res3a_branch1[0][0]              
__________________________________________________________________________________________________
add_4 (Add)                     (None, 28, 28, 512)  0           bn3a_branch2c[0][0]              
                                                                 bn3a_branch1[0][0]               
__________________________________________________________________________________________________
activation_295 (Activation)     (None, 28, 28, 512)  0           add_4[0][0]                      
__________________________________________________________________________________________________
res3b_branch2a (Conv2D)         (None, 28, 28, 128)  65664       activation_295[0][0]             
__________________________________________________________________________________________________
bn3b_branch2a (BatchNormalizati (None, 28, 28, 128)  512         res3b_branch2a[0][0]             
__________________________________________________________________________________________________
activation_296 (Activation)     (None, 28, 28, 128)  0           bn3b_branch2a[0][0]              
__________________________________________________________________________________________________
res3b_branch2b (Conv2D)         (None, 28, 28, 128)  147584      activation_296[0][0]             
__________________________________________________________________________________________________
bn3b_branch2b (BatchNormalizati (None, 28, 28, 128)  512         res3b_branch2b[0][0]             
__________________________________________________________________________________________________
activation_297 (Activation)     (None, 28, 28, 128)  0           bn3b_branch2b[0][0]              
__________________________________________________________________________________________________
res3b_branch2c (Conv2D)         (None, 28, 28, 512)  66048       activation_297[0][0]             
__________________________________________________________________________________________________
bn3b_branch2c (BatchNormalizati (None, 28, 28, 512)  2048        res3b_branch2c[0][0]             
__________________________________________________________________________________________________
add_5 (Add)                     (None, 28, 28, 512)  0           bn3b_branch2c[0][0]              
                                                                 activation_295[0][0]             
__________________________________________________________________________________________________
activation_298 (Activation)     (None, 28, 28, 512)  0           add_5[0][0]                      
__________________________________________________________________________________________________
res3c_branch2a (Conv2D)         (None, 28, 28, 128)  65664       activation_298[0][0]             
__________________________________________________________________________________________________
bn3c_branch2a (BatchNormalizati (None, 28, 28, 128)  512         res3c_branch2a[0][0]             
__________________________________________________________________________________________________
activation_299 (Activation)     (None, 28, 28, 128)  0           bn3c_branch2a[0][0]              
__________________________________________________________________________________________________
res3c_branch2b (Conv2D)         (None, 28, 28, 128)  147584      activation_299[0][0]             
__________________________________________________________________________________________________
bn3c_branch2b (BatchNormalizati (None, 28, 28, 128)  512         res3c_branch2b[0][0]             
__________________________________________________________________________________________________
activation_300 (Activation)     (None, 28, 28, 128)  0           bn3c_branch2b[0][0]              
__________________________________________________________________________________________________
res3c_branch2c (Conv2D)         (None, 28, 28, 512)  66048       activation_300[0][0]             
__________________________________________________________________________________________________
bn3c_branch2c (BatchNormalizati (None, 28, 28, 512)  2048        res3c_branch2c[0][0]             
__________________________________________________________________________________________________
add_6 (Add)                     (None, 28, 28, 512)  0           bn3c_branch2c[0][0]              
                                                                 activation_298[0][0]             
__________________________________________________________________________________________________
activation_301 (Activation)     (None, 28, 28, 512)  0           add_6[0][0]                      
__________________________________________________________________________________________________
res3d_branch2a (Conv2D)         (None, 28, 28, 128)  65664       activation_301[0][0]             
__________________________________________________________________________________________________
bn3d_branch2a (BatchNormalizati (None, 28, 28, 128)  512         res3d_branch2a[0][0]             
__________________________________________________________________________________________________
activation_302 (Activation)     (None, 28, 28, 128)  0           bn3d_branch2a[0][0]              
__________________________________________________________________________________________________
res3d_branch2b (Conv2D)         (None, 28, 28, 128)  147584      activation_302[0][0]             
__________________________________________________________________________________________________
bn3d_branch2b (BatchNormalizati (None, 28, 28, 128)  512         res3d_branch2b[0][0]             
__________________________________________________________________________________________________
activation_303 (Activation)     (None, 28, 28, 128)  0           bn3d_branch2b[0][0]              
__________________________________________________________________________________________________
res3d_branch2c (Conv2D)         (None, 28, 28, 512)  66048       activation_303[0][0]             
__________________________________________________________________________________________________
bn3d_branch2c (BatchNormalizati (None, 28, 28, 512)  2048        res3d_branch2c[0][0]             
__________________________________________________________________________________________________
add_7 (Add)                     (None, 28, 28, 512)  0           bn3d_branch2c[0][0]              
                                                                 activation_301[0][0]             
__________________________________________________________________________________________________
activation_304 (Activation)     (None, 28, 28, 512)  0           add_7[0][0]                      
__________________________________________________________________________________________________
res4a_branch2a (Conv2D)         (None, 14, 14, 256)  131328      activation_304[0][0]             
__________________________________________________________________________________________________
bn4a_branch2a (BatchNormalizati (None, 14, 14, 256)  1024        res4a_branch2a[0][0]             
__________________________________________________________________________________________________
activation_305 (Activation)     (None, 14, 14, 256)  0           bn4a_branch2a[0][0]              
__________________________________________________________________________________________________
res4a_branch2b (Conv2D)         (None, 14, 14, 256)  590080      activation_305[0][0]             
__________________________________________________________________________________________________
bn4a_branch2b (BatchNormalizati (None, 14, 14, 256)  1024        res4a_branch2b[0][0]             
__________________________________________________________________________________________________
activation_306 (Activation)     (None, 14, 14, 256)  0           bn4a_branch2b[0][0]              
__________________________________________________________________________________________________
res4a_branch2c (Conv2D)         (None, 14, 14, 1024) 263168      activation_306[0][0]             
__________________________________________________________________________________________________
res4a_branch1 (Conv2D)          (None, 14, 14, 1024) 525312      activation_304[0][0]             
__________________________________________________________________________________________________
bn4a_branch2c (BatchNormalizati (None, 14, 14, 1024) 4096        res4a_branch2c[0][0]             
__________________________________________________________________________________________________
bn4a_branch1 (BatchNormalizatio (None, 14, 14, 1024) 4096        res4a_branch1[0][0]              
__________________________________________________________________________________________________
add_8 (Add)                     (None, 14, 14, 1024) 0           bn4a_branch2c[0][0]              
                                                                 bn4a_branch1[0][0]               
__________________________________________________________________________________________________
activation_307 (Activation)     (None, 14, 14, 1024) 0           add_8[0][0]                      
__________________________________________________________________________________________________
res4b_branch2a (Conv2D)         (None, 14, 14, 256)  262400      activation_307[0][0]             
__________________________________________________________________________________________________
bn4b_branch2a (BatchNormalizati (None, 14, 14, 256)  1024        res4b_branch2a[0][0]             
__________________________________________________________________________________________________
activation_308 (Activation)     (None, 14, 14, 256)  0           bn4b_branch2a[0][0]              
__________________________________________________________________________________________________
res4b_branch2b (Conv2D)         (None, 14, 14, 256)  590080      activation_308[0][0]             
__________________________________________________________________________________________________
bn4b_branch2b (BatchNormalizati (None, 14, 14, 256)  1024        res4b_branch2b[0][0]             
__________________________________________________________________________________________________
activation_309 (Activation)     (None, 14, 14, 256)  0           bn4b_branch2b[0][0]              
__________________________________________________________________________________________________
res4b_branch2c (Conv2D)         (None, 14, 14, 1024) 263168      activation_309[0][0]             
__________________________________________________________________________________________________
bn4b_branch2c (BatchNormalizati (None, 14, 14, 1024) 4096        res4b_branch2c[0][0]             
__________________________________________________________________________________________________
add_9 (Add)                     (None, 14, 14, 1024) 0           bn4b_branch2c[0][0]              
                                                                 activation_307[0][0]             
__________________________________________________________________________________________________
activation_310 (Activation)     (None, 14, 14, 1024) 0           add_9[0][0]                      
__________________________________________________________________________________________________
res4c_branch2a (Conv2D)         (None, 14, 14, 256)  262400      activation_310[0][0]             
__________________________________________________________________________________________________
bn4c_branch2a (BatchNormalizati (None, 14, 14, 256)  1024        res4c_branch2a[0][0]             
__________________________________________________________________________________________________
activation_311 (Activation)     (None, 14, 14, 256)  0           bn4c_branch2a[0][0]              
__________________________________________________________________________________________________
res4c_branch2b (Conv2D)         (None, 14, 14, 256)  590080      activation_311[0][0]             
__________________________________________________________________________________________________
bn4c_branch2b (BatchNormalizati (None, 14, 14, 256)  1024        res4c_branch2b[0][0]             
__________________________________________________________________________________________________
activation_312 (Activation)     (None, 14, 14, 256)  0           bn4c_branch2b[0][0]              
__________________________________________________________________________________________________
res4c_branch2c (Conv2D)         (None, 14, 14, 1024) 263168      activation_312[0][0]             
__________________________________________________________________________________________________
bn4c_branch2c (BatchNormalizati (None, 14, 14, 1024) 4096        res4c_branch2c[0][0]             
__________________________________________________________________________________________________
add_10 (Add)                    (None, 14, 14, 1024) 0           bn4c_branch2c[0][0]              
                                                                 activation_310[0][0]             
__________________________________________________________________________________________________
activation_313 (Activation)     (None, 14, 14, 1024) 0           add_10[0][0]                     
__________________________________________________________________________________________________
res4d_branch2a (Conv2D)         (None, 14, 14, 256)  262400      activation_313[0][0]             
__________________________________________________________________________________________________
bn4d_branch2a (BatchNormalizati (None, 14, 14, 256)  1024        res4d_branch2a[0][0]             
__________________________________________________________________________________________________
activation_314 (Activation)     (None, 14, 14, 256)  0           bn4d_branch2a[0][0]              
__________________________________________________________________________________________________
res4d_branch2b (Conv2D)         (None, 14, 14, 256)  590080      activation_314[0][0]             
__________________________________________________________________________________________________
bn4d_branch2b (BatchNormalizati (None, 14, 14, 256)  1024        res4d_branch2b[0][0]             
__________________________________________________________________________________________________
activation_315 (Activation)     (None, 14, 14, 256)  0           bn4d_branch2b[0][0]              
__________________________________________________________________________________________________
res4d_branch2c (Conv2D)         (None, 14, 14, 1024) 263168      activation_315[0][0]             
__________________________________________________________________________________________________
bn4d_branch2c (BatchNormalizati (None, 14, 14, 1024) 4096        res4d_branch2c[0][0]             
__________________________________________________________________________________________________
add_11 (Add)                    (None, 14, 14, 1024) 0           bn4d_branch2c[0][0]              
                                                                 activation_313[0][0]             
__________________________________________________________________________________________________
activation_316 (Activation)     (None, 14, 14, 1024) 0           add_11[0][0]                     
__________________________________________________________________________________________________
res4e_branch2a (Conv2D)         (None, 14, 14, 256)  262400      activation_316[0][0]             
__________________________________________________________________________________________________
bn4e_branch2a (BatchNormalizati (None, 14, 14, 256)  1024        res4e_branch2a[0][0]             
__________________________________________________________________________________________________
activation_317 (Activation)     (None, 14, 14, 256)  0           bn4e_branch2a[0][0]              
__________________________________________________________________________________________________
res4e_branch2b (Conv2D)         (None, 14, 14, 256)  590080      activation_317[0][0]             
__________________________________________________________________________________________________
bn4e_branch2b (BatchNormalizati (None, 14, 14, 256)  1024        res4e_branch2b[0][0]             
__________________________________________________________________________________________________
activation_318 (Activation)     (None, 14, 14, 256)  0           bn4e_branch2b[0][0]              
__________________________________________________________________________________________________
res4e_branch2c (Conv2D)         (None, 14, 14, 1024) 263168      activation_318[0][0]             
__________________________________________________________________________________________________
bn4e_branch2c (BatchNormalizati (None, 14, 14, 1024) 4096        res4e_branch2c[0][0]             
__________________________________________________________________________________________________
add_12 (Add)                    (None, 14, 14, 1024) 0           bn4e_branch2c[0][0]              
                                                                 activation_316[0][0]             
__________________________________________________________________________________________________
activation_319 (Activation)     (None, 14, 14, 1024) 0           add_12[0][0]                     
__________________________________________________________________________________________________
res4f_branch2a (Conv2D)         (None, 14, 14, 256)  262400      activation_319[0][0]             
__________________________________________________________________________________________________
bn4f_branch2a (BatchNormalizati (None, 14, 14, 256)  1024        res4f_branch2a[0][0]             
__________________________________________________________________________________________________
activation_320 (Activation)     (None, 14, 14, 256)  0           bn4f_branch2a[0][0]              
__________________________________________________________________________________________________
res4f_branch2b (Conv2D)         (None, 14, 14, 256)  590080      activation_320[0][0]             
__________________________________________________________________________________________________
bn4f_branch2b (BatchNormalizati (None, 14, 14, 256)  1024        res4f_branch2b[0][0]             
__________________________________________________________________________________________________
activation_321 (Activation)     (None, 14, 14, 256)  0           bn4f_branch2b[0][0]              
__________________________________________________________________________________________________
res4f_branch2c (Conv2D)         (None, 14, 14, 1024) 263168      activation_321[0][0]             
__________________________________________________________________________________________________
bn4f_branch2c (BatchNormalizati (None, 14, 14, 1024) 4096        res4f_branch2c[0][0]             
__________________________________________________________________________________________________
add_13 (Add)                    (None, 14, 14, 1024) 0           bn4f_branch2c[0][0]              
                                                                 activation_319[0][0]             
__________________________________________________________________________________________________
activation_322 (Activation)     (None, 14, 14, 1024) 0           add_13[0][0]                     
__________________________________________________________________________________________________
res5a_branch2a (Conv2D)         (None, 7, 7, 512)    524800      activation_322[0][0]             
__________________________________________________________________________________________________
bn5a_branch2a (BatchNormalizati (None, 7, 7, 512)    2048        res5a_branch2a[0][0]             
__________________________________________________________________________________________________
activation_323 (Activation)     (None, 7, 7, 512)    0           bn5a_branch2a[0][0]              
__________________________________________________________________________________________________
res5a_branch2b (Conv2D)         (None, 7, 7, 512)    2359808     activation_323[0][0]             
__________________________________________________________________________________________________
bn5a_branch2b (BatchNormalizati (None, 7, 7, 512)    2048        res5a_branch2b[0][0]             
__________________________________________________________________________________________________
activation_324 (Activation)     (None, 7, 7, 512)    0           bn5a_branch2b[0][0]              
__________________________________________________________________________________________________
res5a_branch2c (Conv2D)         (None, 7, 7, 2048)   1050624     activation_324[0][0]             
__________________________________________________________________________________________________
res5a_branch1 (Conv2D)          (None, 7, 7, 2048)   2099200     activation_322[0][0]             
__________________________________________________________________________________________________
bn5a_branch2c (BatchNormalizati (None, 7, 7, 2048)   8192        res5a_branch2c[0][0]             
__________________________________________________________________________________________________
bn5a_branch1 (BatchNormalizatio (None, 7, 7, 2048)   8192        res5a_branch1[0][0]              
__________________________________________________________________________________________________
add_14 (Add)                    (None, 7, 7, 2048)   0           bn5a_branch2c[0][0]              
                                                                 bn5a_branch1[0][0]               
__________________________________________________________________________________________________
activation_325 (Activation)     (None, 7, 7, 2048)   0           add_14[0][0]                     
__________________________________________________________________________________________________
res5b_branch2a (Conv2D)         (None, 7, 7, 512)    1049088     activation_325[0][0]             
__________________________________________________________________________________________________
bn5b_branch2a (BatchNormalizati (None, 7, 7, 512)    2048        res5b_branch2a[0][0]             
__________________________________________________________________________________________________
activation_326 (Activation)     (None, 7, 7, 512)    0           bn5b_branch2a[0][0]              
__________________________________________________________________________________________________
res5b_branch2b (Conv2D)         (None, 7, 7, 512)    2359808     activation_326[0][0]             
__________________________________________________________________________________________________
bn5b_branch2b (BatchNormalizati (None, 7, 7, 512)    2048        res5b_branch2b[0][0]             
__________________________________________________________________________________________________
activation_327 (Activation)     (None, 7, 7, 512)    0           bn5b_branch2b[0][0]              
__________________________________________________________________________________________________
res5b_branch2c (Conv2D)         (None, 7, 7, 2048)   1050624     activation_327[0][0]             
__________________________________________________________________________________________________
bn5b_branch2c (BatchNormalizati (None, 7, 7, 2048)   8192        res5b_branch2c[0][0]             
__________________________________________________________________________________________________
add_15 (Add)                    (None, 7, 7, 2048)   0           bn5b_branch2c[0][0]              
                                                                 activation_325[0][0]             
__________________________________________________________________________________________________
activation_328 (Activation)     (None, 7, 7, 2048)   0           add_15[0][0]                     
__________________________________________________________________________________________________
res5c_branch2a (Conv2D)         (None, 7, 7, 512)    1049088     activation_328[0][0]             
__________________________________________________________________________________________________
bn5c_branch2a (BatchNormalizati (None, 7, 7, 512)    2048        res5c_branch2a[0][0]             
__________________________________________________________________________________________________
activation_329 (Activation)     (None, 7, 7, 512)    0           bn5c_branch2a[0][0]              
__________________________________________________________________________________________________
res5c_branch2b (Conv2D)         (None, 7, 7, 512)    2359808     activation_329[0][0]             
__________________________________________________________________________________________________
bn5c_branch2b (BatchNormalizati (None, 7, 7, 512)    2048        res5c_branch2b[0][0]             
__________________________________________________________________________________________________
activation_330 (Activation)     (None, 7, 7, 512)    0           bn5c_branch2b[0][0]              
__________________________________________________________________________________________________
res5c_branch2c (Conv2D)         (None, 7, 7, 2048)   1050624     activation_330[0][0]             
__________________________________________________________________________________________________
bn5c_branch2c (BatchNormalizati (None, 7, 7, 2048)   8192        res5c_branch2c[0][0]             
__________________________________________________________________________________________________
add_16 (Add)                    (None, 7, 7, 2048)   0           bn5c_branch2c[0][0]              
                                                                 activation_328[0][0]             
__________________________________________________________________________________________________
activation_331 (Activation)     (None, 7, 7, 2048)   0           add_16[0][0]                     
__________________________________________________________________________________________________
avg_pool (AveragePooling2D)     (None, 1, 1, 2048)   0           activation_331[0][0]             
__________________________________________________________________________________________________
global_average_pooling2d_4 (Glo (None, 2048)         0           avg_pool[0][0]                   
__________________________________________________________________________________________________
dropout_4 (Dropout)             (None, 2048)         0           global_average_pooling2d_4[0][0] 
__________________________________________________________________________________________________
dense_4 (Dense)                 (None, 1)            2049        dropout_4[0][0]                  
==================================================================================================
Total params: 23,589,761
Trainable params: 2,049
Non-trainable params: 23,587,712
__________________________________________________________________________________________________
In [16]:
# Stop training once validation loss has failed to improve for 5 epochs in a row.
earlystop = EarlyStopping(monitor='val_loss', patience=5, verbose=0, mode='auto')

# Binary dog-vs-cat head -> binary cross-entropy with Adam.
model_ResNet50.compile(loss='binary_crossentropy', optimizer='Adam', metrics=['accuracy'])

# One "step" consumes one batch, so an epoch covers the whole split once.
train_steps = len(y_train) // batch_size
val_steps = len(y_val) // batch_size

model_ResNet50_history = model_ResNet50.fit_generator(
    train_gen_224(X_train, y_train, batch_size=batch_size),
    steps_per_epoch=train_steps,
    epochs=epochs,
    validation_data=val_gen_224(X_val, y_val, batch_size=batch_size),
    validation_steps=val_steps,
    callbacks=[earlystop],
    verbose=2)
Epoch 1/10
 - 179s - loss: 0.1637 - acc: 0.9333 - val_loss: 0.0772 - val_acc: 0.9714
Epoch 2/10
 - 177s - loss: 0.0959 - acc: 0.9627 - val_loss: 0.0837 - val_acc: 0.9708
Epoch 3/10
 - 176s - loss: 0.0843 - acc: 0.9667 - val_loss: 0.0884 - val_acc: 0.9706
Epoch 4/10
 - 176s - loss: 0.0853 - acc: 0.9667 - val_loss: 0.0775 - val_acc: 0.9738
Epoch 5/10
 - 176s - loss: 0.0786 - acc: 0.9691 - val_loss: 0.0771 - val_acc: 0.9740
Epoch 6/10
 - 176s - loss: 0.0796 - acc: 0.9691 - val_loss: 0.0849 - val_acc: 0.9728
Epoch 7/10
 - 178s - loss: 0.0779 - acc: 0.9697 - val_loss: 0.0890 - val_acc: 0.9716
Epoch 8/10
 - 178s - loss: 0.0783 - acc: 0.9695 - val_loss: 0.1010 - val_acc: 0.9698
Epoch 9/10
 - 177s - loss: 0.0744 - acc: 0.9723 - val_loss: 0.0823 - val_acc: 0.9738
Epoch 10/10
 - 176s - loss: 0.0737 - acc: 0.9718 - val_loss: 0.0997 - val_acc: 0.9706
In [17]:
from matplotlib import pyplot as plt

history = model_ResNet50_history

# Render one figure per tracked quantity (accuracy, then loss), each showing
# the training curve against the validation curve per epoch.
for metric, title, y_label, legend_loc in (
        ('acc', 'model accuracy', 'accuracy', 'lower right'),
        ('loss', 'model loss', 'loss', 'upper right')):
    plt.plot(history.history[metric])
    plt.plot(history.history['val_' + metric])
    plt.title(title)
    plt.ylabel(y_label)
    plt.xlabel('epoch')
    plt.grid()
    plt.legend(['train', 'test'], loc=legend_loc)
    plt.show()
In [18]:
# Xception pretrained on ImageNet, with its classifier removed; 'avg' pooling
# collapses the final feature maps into one 2048-d vector per image.
base_model = Xception(include_top=False, weights='imagenet', input_shape=(299, 299, 3), pooling='avg')

# Freeze the whole convolutional base so only the new head below is trained.
for layer in base_model.layers:
    layer.trainable = False

# New head: dropout for regularization, then a single sigmoid unit (cat/dog).
head = Dropout(0.5)(base_model.output)
head = Dense(1, activation='sigmoid')(head)
model_Xception = Model(inputs=base_model.input, outputs=head, name='model_Xception')
model_Xception.summary()
__________________________________________________________________________________________________
Layer (type)                    Output Shape         Param #     Connected to                     
==================================================================================================
input_5 (InputLayer)            (None, 299, 299, 3)  0                                            
__________________________________________________________________________________________________
block1_conv1 (Conv2D)           (None, 149, 149, 32) 864         input_5[0][0]                    
__________________________________________________________________________________________________
block1_conv1_bn (BatchNormaliza (None, 149, 149, 32) 128         block1_conv1[0][0]               
__________________________________________________________________________________________________
block1_conv1_act (Activation)   (None, 149, 149, 32) 0           block1_conv1_bn[0][0]            
__________________________________________________________________________________________________
block1_conv2 (Conv2D)           (None, 147, 147, 64) 18432       block1_conv1_act[0][0]           
__________________________________________________________________________________________________
block1_conv2_bn (BatchNormaliza (None, 147, 147, 64) 256         block1_conv2[0][0]               
__________________________________________________________________________________________________
block1_conv2_act (Activation)   (None, 147, 147, 64) 0           block1_conv2_bn[0][0]            
__________________________________________________________________________________________________
block2_sepconv1 (SeparableConv2 (None, 147, 147, 128 8768        block1_conv2_act[0][0]           
__________________________________________________________________________________________________
block2_sepconv1_bn (BatchNormal (None, 147, 147, 128 512         block2_sepconv1[0][0]            
__________________________________________________________________________________________________
block2_sepconv2_act (Activation (None, 147, 147, 128 0           block2_sepconv1_bn[0][0]         
__________________________________________________________________________________________________
block2_sepconv2 (SeparableConv2 (None, 147, 147, 128 17536       block2_sepconv2_act[0][0]        
__________________________________________________________________________________________________
block2_sepconv2_bn (BatchNormal (None, 147, 147, 128 512         block2_sepconv2[0][0]            
__________________________________________________________________________________________________
conv2d_283 (Conv2D)             (None, 74, 74, 128)  8192        block1_conv2_act[0][0]           
__________________________________________________________________________________________________
block2_pool (MaxPooling2D)      (None, 74, 74, 128)  0           block2_sepconv2_bn[0][0]         
__________________________________________________________________________________________________
batch_normalization_283 (BatchN (None, 74, 74, 128)  512         conv2d_283[0][0]                 
__________________________________________________________________________________________________
add_17 (Add)                    (None, 74, 74, 128)  0           block2_pool[0][0]                
                                                                 batch_normalization_283[0][0]    
__________________________________________________________________________________________________
block3_sepconv1_act (Activation (None, 74, 74, 128)  0           add_17[0][0]                     
__________________________________________________________________________________________________
block3_sepconv1 (SeparableConv2 (None, 74, 74, 256)  33920       block3_sepconv1_act[0][0]        
__________________________________________________________________________________________________
block3_sepconv1_bn (BatchNormal (None, 74, 74, 256)  1024        block3_sepconv1[0][0]            
__________________________________________________________________________________________________
block3_sepconv2_act (Activation (None, 74, 74, 256)  0           block3_sepconv1_bn[0][0]         
__________________________________________________________________________________________________
block3_sepconv2 (SeparableConv2 (None, 74, 74, 256)  67840       block3_sepconv2_act[0][0]        
__________________________________________________________________________________________________
block3_sepconv2_bn (BatchNormal (None, 74, 74, 256)  1024        block3_sepconv2[0][0]            
__________________________________________________________________________________________________
conv2d_284 (Conv2D)             (None, 37, 37, 256)  32768       add_17[0][0]                     
__________________________________________________________________________________________________
block3_pool (MaxPooling2D)      (None, 37, 37, 256)  0           block3_sepconv2_bn[0][0]         
__________________________________________________________________________________________________
batch_normalization_284 (BatchN (None, 37, 37, 256)  1024        conv2d_284[0][0]                 
__________________________________________________________________________________________________
add_18 (Add)                    (None, 37, 37, 256)  0           block3_pool[0][0]                
                                                                 batch_normalization_284[0][0]    
__________________________________________________________________________________________________
block4_sepconv1_act (Activation (None, 37, 37, 256)  0           add_18[0][0]                     
__________________________________________________________________________________________________
block4_sepconv1 (SeparableConv2 (None, 37, 37, 728)  188672      block4_sepconv1_act[0][0]        
__________________________________________________________________________________________________
block4_sepconv1_bn (BatchNormal (None, 37, 37, 728)  2912        block4_sepconv1[0][0]            
__________________________________________________________________________________________________
block4_sepconv2_act (Activation (None, 37, 37, 728)  0           block4_sepconv1_bn[0][0]         
__________________________________________________________________________________________________
block4_sepconv2 (SeparableConv2 (None, 37, 37, 728)  536536      block4_sepconv2_act[0][0]        
__________________________________________________________________________________________________
block4_sepconv2_bn (BatchNormal (None, 37, 37, 728)  2912        block4_sepconv2[0][0]            
__________________________________________________________________________________________________
conv2d_285 (Conv2D)             (None, 19, 19, 728)  186368      add_18[0][0]                     
__________________________________________________________________________________________________
block4_pool (MaxPooling2D)      (None, 19, 19, 728)  0           block4_sepconv2_bn[0][0]         
__________________________________________________________________________________________________
batch_normalization_285 (BatchN (None, 19, 19, 728)  2912        conv2d_285[0][0]                 
__________________________________________________________________________________________________
add_19 (Add)                    (None, 19, 19, 728)  0           block4_pool[0][0]                
                                                                 batch_normalization_285[0][0]    
__________________________________________________________________________________________________
block5_sepconv1_act (Activation (None, 19, 19, 728)  0           add_19[0][0]                     
__________________________________________________________________________________________________
block5_sepconv1 (SeparableConv2 (None, 19, 19, 728)  536536      block5_sepconv1_act[0][0]        
__________________________________________________________________________________________________
block5_sepconv1_bn (BatchNormal (None, 19, 19, 728)  2912        block5_sepconv1[0][0]            
__________________________________________________________________________________________________
block5_sepconv2_act (Activation (None, 19, 19, 728)  0           block5_sepconv1_bn[0][0]         
__________________________________________________________________________________________________
block5_sepconv2 (SeparableConv2 (None, 19, 19, 728)  536536      block5_sepconv2_act[0][0]        
__________________________________________________________________________________________________
block5_sepconv2_bn (BatchNormal (None, 19, 19, 728)  2912        block5_sepconv2[0][0]            
__________________________________________________________________________________________________
block5_sepconv3_act (Activation (None, 19, 19, 728)  0           block5_sepconv2_bn[0][0]         
__________________________________________________________________________________________________
block5_sepconv3 (SeparableConv2 (None, 19, 19, 728)  536536      block5_sepconv3_act[0][0]        
__________________________________________________________________________________________________
block5_sepconv3_bn (BatchNormal (None, 19, 19, 728)  2912        block5_sepconv3[0][0]            
__________________________________________________________________________________________________
add_20 (Add)                    (None, 19, 19, 728)  0           block5_sepconv3_bn[0][0]         
                                                                 add_19[0][0]                     
__________________________________________________________________________________________________
block6_sepconv1_act (Activation (None, 19, 19, 728)  0           add_20[0][0]                     
__________________________________________________________________________________________________
block6_sepconv1 (SeparableConv2 (None, 19, 19, 728)  536536      block6_sepconv1_act[0][0]        
__________________________________________________________________________________________________
block6_sepconv1_bn (BatchNormal (None, 19, 19, 728)  2912        block6_sepconv1[0][0]            
__________________________________________________________________________________________________
block6_sepconv2_act (Activation (None, 19, 19, 728)  0           block6_sepconv1_bn[0][0]         
__________________________________________________________________________________________________
block6_sepconv2 (SeparableConv2 (None, 19, 19, 728)  536536      block6_sepconv2_act[0][0]        
__________________________________________________________________________________________________
block6_sepconv2_bn (BatchNormal (None, 19, 19, 728)  2912        block6_sepconv2[0][0]            
__________________________________________________________________________________________________
block6_sepconv3_act (Activation (None, 19, 19, 728)  0           block6_sepconv2_bn[0][0]         
__________________________________________________________________________________________________
block6_sepconv3 (SeparableConv2 (None, 19, 19, 728)  536536      block6_sepconv3_act[0][0]        
__________________________________________________________________________________________________
block6_sepconv3_bn (BatchNormal (None, 19, 19, 728)  2912        block6_sepconv3[0][0]            
__________________________________________________________________________________________________
add_21 (Add)                    (None, 19, 19, 728)  0           block6_sepconv3_bn[0][0]         
                                                                 add_20[0][0]                     
__________________________________________________________________________________________________
block7_sepconv1_act (Activation (None, 19, 19, 728)  0           add_21[0][0]                     
__________________________________________________________________________________________________
block7_sepconv1 (SeparableConv2 (None, 19, 19, 728)  536536      block7_sepconv1_act[0][0]        
__________________________________________________________________________________________________
block7_sepconv1_bn (BatchNormal (None, 19, 19, 728)  2912        block7_sepconv1[0][0]            
__________________________________________________________________________________________________
block7_sepconv2_act (Activation (None, 19, 19, 728)  0           block7_sepconv1_bn[0][0]         
__________________________________________________________________________________________________
block7_sepconv2 (SeparableConv2 (None, 19, 19, 728)  536536      block7_sepconv2_act[0][0]        
__________________________________________________________________________________________________
block7_sepconv2_bn (BatchNormal (None, 19, 19, 728)  2912        block7_sepconv2[0][0]            
__________________________________________________________________________________________________
block7_sepconv3_act (Activation (None, 19, 19, 728)  0           block7_sepconv2_bn[0][0]         
__________________________________________________________________________________________________
block7_sepconv3 (SeparableConv2 (None, 19, 19, 728)  536536      block7_sepconv3_act[0][0]        
__________________________________________________________________________________________________
block7_sepconv3_bn (BatchNormal (None, 19, 19, 728)  2912        block7_sepconv3[0][0]            
__________________________________________________________________________________________________
add_22 (Add)                    (None, 19, 19, 728)  0           block7_sepconv3_bn[0][0]         
                                                                 add_21[0][0]                     
__________________________________________________________________________________________________
block8_sepconv1_act (Activation (None, 19, 19, 728)  0           add_22[0][0]                     
__________________________________________________________________________________________________
block8_sepconv1 (SeparableConv2 (None, 19, 19, 728)  536536      block8_sepconv1_act[0][0]        
__________________________________________________________________________________________________
block8_sepconv1_bn (BatchNormal (None, 19, 19, 728)  2912        block8_sepconv1[0][0]            
__________________________________________________________________________________________________
block8_sepconv2_act (Activation (None, 19, 19, 728)  0           block8_sepconv1_bn[0][0]         
__________________________________________________________________________________________________
block8_sepconv2 (SeparableConv2 (None, 19, 19, 728)  536536      block8_sepconv2_act[0][0]        
__________________________________________________________________________________________________
block8_sepconv2_bn (BatchNormal (None, 19, 19, 728)  2912        block8_sepconv2[0][0]            
__________________________________________________________________________________________________
block8_sepconv3_act (Activation (None, 19, 19, 728)  0           block8_sepconv2_bn[0][0]         
__________________________________________________________________________________________________
block8_sepconv3 (SeparableConv2 (None, 19, 19, 728)  536536      block8_sepconv3_act[0][0]        
__________________________________________________________________________________________________
block8_sepconv3_bn (BatchNormal (None, 19, 19, 728)  2912        block8_sepconv3[0][0]            
__________________________________________________________________________________________________
add_23 (Add)                    (None, 19, 19, 728)  0           block8_sepconv3_bn[0][0]         
                                                                 add_22[0][0]                     
__________________________________________________________________________________________________
block9_sepconv1_act (Activation (None, 19, 19, 728)  0           add_23[0][0]                     
__________________________________________________________________________________________________
block9_sepconv1 (SeparableConv2 (None, 19, 19, 728)  536536      block9_sepconv1_act[0][0]        
__________________________________________________________________________________________________
block9_sepconv1_bn (BatchNormal (None, 19, 19, 728)  2912        block9_sepconv1[0][0]            
__________________________________________________________________________________________________
block9_sepconv2_act (Activation (None, 19, 19, 728)  0           block9_sepconv1_bn[0][0]         
__________________________________________________________________________________________________
block9_sepconv2 (SeparableConv2 (None, 19, 19, 728)  536536      block9_sepconv2_act[0][0]        
__________________________________________________________________________________________________
block9_sepconv2_bn (BatchNormal (None, 19, 19, 728)  2912        block9_sepconv2[0][0]            
__________________________________________________________________________________________________
block9_sepconv3_act (Activation (None, 19, 19, 728)  0           block9_sepconv2_bn[0][0]         
__________________________________________________________________________________________________
block9_sepconv3 (SeparableConv2 (None, 19, 19, 728)  536536      block9_sepconv3_act[0][0]        
__________________________________________________________________________________________________
block9_sepconv3_bn (BatchNormal (None, 19, 19, 728)  2912        block9_sepconv3[0][0]            
__________________________________________________________________________________________________
add_24 (Add)                    (None, 19, 19, 728)  0           block9_sepconv3_bn[0][0]         
                                                                 add_23[0][0]                     
__________________________________________________________________________________________________
block10_sepconv1_act (Activatio (None, 19, 19, 728)  0           add_24[0][0]                     
__________________________________________________________________________________________________
block10_sepconv1 (SeparableConv (None, 19, 19, 728)  536536      block10_sepconv1_act[0][0]       
__________________________________________________________________________________________________
block10_sepconv1_bn (BatchNorma (None, 19, 19, 728)  2912        block10_sepconv1[0][0]           
__________________________________________________________________________________________________
block10_sepconv2_act (Activatio (None, 19, 19, 728)  0           block10_sepconv1_bn[0][0]        
__________________________________________________________________________________________________
block10_sepconv2 (SeparableConv (None, 19, 19, 728)  536536      block10_sepconv2_act[0][0]       
__________________________________________________________________________________________________
block10_sepconv2_bn (BatchNorma (None, 19, 19, 728)  2912        block10_sepconv2[0][0]           
__________________________________________________________________________________________________
block10_sepconv3_act (Activatio (None, 19, 19, 728)  0           block10_sepconv2_bn[0][0]        
__________________________________________________________________________________________________
block10_sepconv3 (SeparableConv (None, 19, 19, 728)  536536      block10_sepconv3_act[0][0]       
__________________________________________________________________________________________________
block10_sepconv3_bn (BatchNorma (None, 19, 19, 728)  2912        block10_sepconv3[0][0]           
__________________________________________________________________________________________________
add_25 (Add)                    (None, 19, 19, 728)  0           block10_sepconv3_bn[0][0]        
                                                                 add_24[0][0]                     
__________________________________________________________________________________________________
block11_sepconv1_act (Activatio (None, 19, 19, 728)  0           add_25[0][0]                     
__________________________________________________________________________________________________
block11_sepconv1 (SeparableConv (None, 19, 19, 728)  536536      block11_sepconv1_act[0][0]       
__________________________________________________________________________________________________
block11_sepconv1_bn (BatchNorma (None, 19, 19, 728)  2912        block11_sepconv1[0][0]           
__________________________________________________________________________________________________
block11_sepconv2_act (Activatio (None, 19, 19, 728)  0           block11_sepconv1_bn[0][0]        
__________________________________________________________________________________________________
block11_sepconv2 (SeparableConv (None, 19, 19, 728)  536536      block11_sepconv2_act[0][0]       
__________________________________________________________________________________________________
block11_sepconv2_bn (BatchNorma (None, 19, 19, 728)  2912        block11_sepconv2[0][0]           
__________________________________________________________________________________________________
block11_sepconv3_act (Activatio (None, 19, 19, 728)  0           block11_sepconv2_bn[0][0]        
__________________________________________________________________________________________________
block11_sepconv3 (SeparableConv (None, 19, 19, 728)  536536      block11_sepconv3_act[0][0]       
__________________________________________________________________________________________________
block11_sepconv3_bn (BatchNorma (None, 19, 19, 728)  2912        block11_sepconv3[0][0]           
__________________________________________________________________________________________________
add_26 (Add)                    (None, 19, 19, 728)  0           block11_sepconv3_bn[0][0]        
                                                                 add_25[0][0]                     
__________________________________________________________________________________________________
block12_sepconv1_act (Activatio (None, 19, 19, 728)  0           add_26[0][0]                     
__________________________________________________________________________________________________
block12_sepconv1 (SeparableConv (None, 19, 19, 728)  536536      block12_sepconv1_act[0][0]       
__________________________________________________________________________________________________
block12_sepconv1_bn (BatchNorma (None, 19, 19, 728)  2912        block12_sepconv1[0][0]           
__________________________________________________________________________________________________
block12_sepconv2_act (Activatio (None, 19, 19, 728)  0           block12_sepconv1_bn[0][0]        
__________________________________________________________________________________________________
block12_sepconv2 (SeparableConv (None, 19, 19, 728)  536536      block12_sepconv2_act[0][0]       
__________________________________________________________________________________________________
block12_sepconv2_bn (BatchNorma (None, 19, 19, 728)  2912        block12_sepconv2[0][0]           
__________________________________________________________________________________________________
block12_sepconv3_act (Activatio (None, 19, 19, 728)  0           block12_sepconv2_bn[0][0]        
__________________________________________________________________________________________________
block12_sepconv3 (SeparableConv (None, 19, 19, 728)  536536      block12_sepconv3_act[0][0]       
__________________________________________________________________________________________________
block12_sepconv3_bn (BatchNorma (None, 19, 19, 728)  2912        block12_sepconv3[0][0]           
__________________________________________________________________________________________________
add_27 (Add)                    (None, 19, 19, 728)  0           block12_sepconv3_bn[0][0]        
                                                                 add_26[0][0]                     
__________________________________________________________________________________________________
block13_sepconv1_act (Activatio (None, 19, 19, 728)  0           add_27[0][0]                     
__________________________________________________________________________________________________
block13_sepconv1 (SeparableConv (None, 19, 19, 728)  536536      block13_sepconv1_act[0][0]       
__________________________________________________________________________________________________
block13_sepconv1_bn (BatchNorma (None, 19, 19, 728)  2912        block13_sepconv1[0][0]           
__________________________________________________________________________________________________
block13_sepconv2_act (Activatio (None, 19, 19, 728)  0           block13_sepconv1_bn[0][0]        
__________________________________________________________________________________________________
block13_sepconv2 (SeparableConv (None, 19, 19, 1024) 752024      block13_sepconv2_act[0][0]       
__________________________________________________________________________________________________
block13_sepconv2_bn (BatchNorma (None, 19, 19, 1024) 4096        block13_sepconv2[0][0]           
__________________________________________________________________________________________________
conv2d_286 (Conv2D)             (None, 10, 10, 1024) 745472      add_27[0][0]                     
__________________________________________________________________________________________________
block13_pool (MaxPooling2D)     (None, 10, 10, 1024) 0           block13_sepconv2_bn[0][0]        
__________________________________________________________________________________________________
batch_normalization_286 (BatchN (None, 10, 10, 1024) 4096        conv2d_286[0][0]                 
__________________________________________________________________________________________________
add_28 (Add)                    (None, 10, 10, 1024) 0           block13_pool[0][0]               
                                                                 batch_normalization_286[0][0]    
__________________________________________________________________________________________________
block14_sepconv1 (SeparableConv (None, 10, 10, 1536) 1582080     add_28[0][0]                     
__________________________________________________________________________________________________
block14_sepconv1_bn (BatchNorma (None, 10, 10, 1536) 6144        block14_sepconv1[0][0]           
__________________________________________________________________________________________________
block14_sepconv1_act (Activatio (None, 10, 10, 1536) 0           block14_sepconv1_bn[0][0]        
__________________________________________________________________________________________________
block14_sepconv2 (SeparableConv (None, 10, 10, 2048) 3159552     block14_sepconv1_act[0][0]       
__________________________________________________________________________________________________
block14_sepconv2_bn (BatchNorma (None, 10, 10, 2048) 8192        block14_sepconv2[0][0]           
__________________________________________________________________________________________________
block14_sepconv2_act (Activatio (None, 10, 10, 2048) 0           block14_sepconv2_bn[0][0]        
__________________________________________________________________________________________________
global_average_pooling2d_5 (Glo (None, 2048)         0           block14_sepconv2_act[0][0]       
__________________________________________________________________________________________________
dropout_5 (Dropout)             (None, 2048)         0           global_average_pooling2d_5[0][0] 
__________________________________________________________________________________________________
dense_5 (Dense)                 (None, 1)            2049        dropout_5[0][0]                  
==================================================================================================
Total params: 20,863,529
Trainable params: 2,049
Non-trainable params: 20,861,480
__________________________________________________________________________________________________
In [19]:
# Early stopping: give up after 5 epochs without a validation-loss improvement.
earlystop = EarlyStopping(monitor='val_loss', patience=5, verbose=0, mode='auto')

# Same training setup as the other backbones: Adam + binary cross-entropy.
model_Xception.compile(optimizer='Adam', loss='binary_crossentropy', metrics=['accuracy'])

steps_train = len(y_train) // batch_size
steps_val = len(y_val) // batch_size
model_Xception_history = model_Xception.fit_generator(
    train_generator(X_train, y_train, batch_size=batch_size),
    steps_per_epoch=steps_train,
    epochs=epochs,
    validation_data=val_generator(X_val, y_val, batch_size=batch_size),
    validation_steps=steps_val,
    callbacks=[earlystop],
    verbose=2)
Epoch 1/10
 - 367s - loss: 0.1356 - acc: 0.9645 - val_loss: 0.0901 - val_acc: 0.9768
Epoch 2/10
 - 364s - loss: 0.0630 - acc: 0.9799 - val_loss: 0.0783 - val_acc: 0.9776
Epoch 3/10
 - 364s - loss: 0.0546 - acc: 0.9820 - val_loss: 0.0801 - val_acc: 0.9764
Epoch 4/10
 - 364s - loss: 0.0487 - acc: 0.9829 - val_loss: 0.0758 - val_acc: 0.9774
Epoch 5/10
 - 364s - loss: 0.0490 - acc: 0.9825 - val_loss: 0.0791 - val_acc: 0.9756
Epoch 6/10
 - 364s - loss: 0.0472 - acc: 0.9834 - val_loss: 0.0722 - val_acc: 0.9780
Epoch 7/10
 - 363s - loss: 0.0447 - acc: 0.9843 - val_loss: 0.0696 - val_acc: 0.9798
Epoch 8/10
 - 364s - loss: 0.0438 - acc: 0.9839 - val_loss: 0.0754 - val_acc: 0.9772
Epoch 9/10
 - 364s - loss: 0.0444 - acc: 0.9840 - val_loss: 0.0729 - val_acc: 0.9784
Epoch 10/10
 - 363s - loss: 0.0424 - acc: 0.9848 - val_loss: 0.0734 - val_acc: 0.9780
In [20]:
from matplotlib import pyplot as plt

history = model_Xception_history

# Two figures, in order: accuracy curves then loss curves, train vs. validation.
for metric, title, y_label, legend_loc in (
        ('acc', 'model accuracy', 'accuracy', 'lower right'),
        ('loss', 'model loss', 'loss', 'upper right')):
    plt.plot(history.history[metric])
    plt.plot(history.history['val_' + metric])
    plt.title(title)
    plt.ylabel(y_label)
    plt.xlabel('epoch')
    plt.grid()
    plt.legend(['train', 'test'], loc=legend_loc)
    plt.show()
In [ ]:
# test generator
# Load the test set (images named 1.jpg .. 12500.jpg under test/test/), resized
# to the 299x299 input used by the Inception/Xception-style models above.
n_test = 12500
X_test = np.zeros((n_test, 299, 299, 3), dtype=np.uint8)
for i in tqdm(range(n_test)):  # was a second hard-coded 12500 that could drift from n_test
    # File names are 1-based, the array index is 0-based.
    X_test[i] = cv2.resize(cv2.imread('test/test/%d.jpg' % (i + 1)), (299, 299))

def test_generator(X, batch_size):
    """Yield successive batches of X rescaled from [0, 255] to [-1, 1].

    Loops over the data forever (as Keras predict_generator expects) and
    yields full batches only: a trailing remainder of len(X) % batch_size
    samples is never produced, so batch_size should divide len(X) evenly
    (here 12500 % 100 == 0).
    """
    n_batches = len(X) // batch_size
    while True:
        for b in range(n_batches):
            yield (X[b * batch_size:(b + 1) * batch_size] - 127.5) / 127.5
In [ ]:
import pandas as pd  # pandas is used below but was never imported in the import cell

# Predict on all 12,500 test images (125 steps x batch of 100), then clip the
# probabilities to [0.005, 0.995] so a single confidently-wrong prediction
# cannot dominate the competition's log-loss metric.
# NOTE(review): model_InceptionV3_GAP is defined in an earlier cell — confirm
# this is the intended model (the last one trained here was model_Xception).
predictions = model_InceptionV3_GAP.predict_generator(test_generator(X_test, 100), steps=125, verbose=1)
predictions = predictions.clip(min=0.005, max=0.995)

output = pd.read_csv('sample_submission.csv')
print(len(predictions))
# predict_generator returns shape (N, 1); flatten to 1-D — assigning a 2-D
# ndarray to a DataFrame column raises in current pandas versions.
output['label'] = predictions.ravel()
output.to_csv('InceptionV3_predictions.csv', index=None)